In [1]:
from google.colab import drive
drive.mount("/content/MyDrive")
Mounted at /content/MyDrive
In [2]:
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib.image import imread
from sklearn.model_selection import train_test_split
from scipy.stats import zscore
import tensorflow as tf
import keras
tf.__version__

from tensorflow.keras.utils import to_categorical

import cv2

%matplotlib inline

import warnings
warnings.filterwarnings('ignore')

Part A¶

Loading the data¶

In [3]:
df1=pd.read_csv('/content/MyDrive/MyDrive/Dataset/NN Project Data - Signal.csv')  #Reading the csv file
In [4]:
df1.head()  #Showing the first 5 datapoints
Out[4]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Signal_Strength
0 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 5
1 7.8 0.88 0.00 2.6 0.098 25.0 67.0 0.9968 3.20 0.68 9.8 5
2 7.8 0.76 0.04 2.3 0.092 15.0 54.0 0.9970 3.26 0.65 9.8 5
3 11.2 0.28 0.56 1.9 0.075 17.0 60.0 0.9980 3.16 0.58 9.8 6
4 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 5
In [5]:
df1.shape #checking the no of rows and columns in dataset
Out[5]:
(1599, 12)

There are 1599 rows and 12 columns

Data preprocessing¶

Check for missing value¶

In [6]:
df1.isnull().sum() #check for null values
Out[6]:
Parameter 1        0
Parameter 2        0
Parameter 3        0
Parameter 4        0
Parameter 5        0
Parameter 6        0
Parameter 7        0
Parameter 8        0
Parameter 9        0
Parameter 10       0
Parameter 11       0
Signal_Strength    0
dtype: int64
In [7]:
percent_missing = df1.isnull().sum() * 100 / len(df1)          #missing value percentage
missing_value_df = pd.DataFrame({'column_name': df1.columns,
                                 'percent_missing': percent_missing})
missing_value_df
Out[7]:
column_name percent_missing
Parameter 1 Parameter 1 0.0
Parameter 2 Parameter 2 0.0
Parameter 3 Parameter 3 0.0
Parameter 4 Parameter 4 0.0
Parameter 5 Parameter 5 0.0
Parameter 6 Parameter 6 0.0
Parameter 7 Parameter 7 0.0
Parameter 8 Parameter 8 0.0
Parameter 9 Parameter 9 0.0
Parameter 10 Parameter 10 0.0
Parameter 11 Parameter 11 0.0
Signal_Strength Signal_Strength 0.0

There are no null values in the dataset

In [8]:
df2=df1.copy(deep=True)
In [9]:
df2.info() #info of the complete dataset
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 1599 entries, 0 to 1598
Data columns (total 12 columns):
 #   Column           Non-Null Count  Dtype  
---  ------           --------------  -----  
 0   Parameter 1      1599 non-null   float64
 1   Parameter 2      1599 non-null   float64
 2   Parameter 3      1599 non-null   float64
 3   Parameter 4      1599 non-null   float64
 4   Parameter 5      1599 non-null   float64
 5   Parameter 6      1599 non-null   float64
 6   Parameter 7      1599 non-null   float64
 7   Parameter 8      1599 non-null   float64
 8   Parameter 9      1599 non-null   float64
 9   Parameter 10     1599 non-null   float64
 10  Parameter 11     1599 non-null   float64
 11  Signal_Strength  1599 non-null   int64  
dtypes: float64(11), int64(1)
memory usage: 150.0 KB
In [10]:
df2.duplicated().sum()
Out[10]:
240

As per the given dataset it is possible to contain duplicate values. Hence further imputations are not performed.

Data Visualization¶

In [11]:
df2.describe().T #Statistical summary
Out[11]:
count mean std min 25% 50% 75% max
Parameter 1 1599.0 8.319637 1.741096 4.60000 7.1000 7.90000 9.200000 15.90000
Parameter 2 1599.0 0.527821 0.179060 0.12000 0.3900 0.52000 0.640000 1.58000
Parameter 3 1599.0 0.270976 0.194801 0.00000 0.0900 0.26000 0.420000 1.00000
Parameter 4 1599.0 2.538806 1.409928 0.90000 1.9000 2.20000 2.600000 15.50000
Parameter 5 1599.0 0.087467 0.047065 0.01200 0.0700 0.07900 0.090000 0.61100
Parameter 6 1599.0 15.874922 10.460157 1.00000 7.0000 14.00000 21.000000 72.00000
Parameter 7 1599.0 46.467792 32.895324 6.00000 22.0000 38.00000 62.000000 289.00000
Parameter 8 1599.0 0.996747 0.001887 0.99007 0.9956 0.99675 0.997835 1.00369
Parameter 9 1599.0 3.311113 0.154386 2.74000 3.2100 3.31000 3.400000 4.01000
Parameter 10 1599.0 0.658149 0.169507 0.33000 0.5500 0.62000 0.730000 2.00000
Parameter 11 1599.0 10.422983 1.065668 8.40000 9.5000 10.20000 11.100000 14.90000
Signal_Strength 1599.0 5.636023 0.807569 3.00000 5.0000 6.00000 6.000000 8.00000
In [12]:
sns.countplot(data=df2,x='Signal_Strength');

Signal 5 and 6 has higher counts

In [13]:
correlation_values=df2.corr()['Signal_Strength']         #Correlation of features with target variable
correlation_values.abs().sort_values(ascending=False)
Out[13]:
Signal_Strength    1.000000
Parameter 11       0.476166
Parameter 2        0.390558
Parameter 10       0.251397
Parameter 3        0.226373
Parameter 7        0.185100
Parameter 8        0.174919
Parameter 5        0.128907
Parameter 1        0.124052
Parameter 9        0.057731
Parameter 6        0.050656
Parameter 4        0.013732
Name: Signal_Strength, dtype: float64
In [14]:
plt.figure(figsize = (15,7))
sns.heatmap(df2.corr(), cmap='plasma',annot=True, fmt='.2f');
In [15]:
plt.figure(figsize=(20,6));

plt.subplot(1,3,1);
sns.distplot(df2['Parameter 7'],color='green');
plt.title('Parameter 7')

plt.subplot(1,3,2);
sns.distplot(df2['Parameter 9'],color='blue');
plt.title('Parameter 9')

plt.subplot(1,3,3);
sns.distplot(df2['Parameter 11'],color='red');
plt.title('Parameter 11')

plt.figure(figsize=(20,6));

plt.subplot(1,3,1);
sns.boxplot(y=df2['Parameter 5'],color='yellow');
plt.title('Parameter 5')

plt.subplot(1,3,2);
sns.boxplot(y=df2['Parameter 6'],color='orange');
plt.title('Parameter 6')

plt.subplot(1,3,3);
sns.boxplot(y=df2['Signal_Strength'],color='red');
plt.title('Signal_Strength')
Out[15]:
Text(0.5, 1.0, 'Signal_Strength')
In [16]:
plt.figure(figsize=(20,6));

plt.subplot(1,3,1);
sns.distplot(df2['Parameter 5'],color='green');
plt.title('Parameter 5')

plt.subplot(1,3,2);
sns.distplot(df2['Parameter 6'],color='blue');
plt.title('Parameter 6')

plt.subplot(1,3,3);
sns.distplot(df2['Signal_Strength'],color='red');
plt.title('Signal_Strength')

plt.figure(figsize=(20,6));

plt.subplot(1,3,1);
sns.boxplot(y=df2['Parameter 7'],color='yellow');
plt.title('Parameter 7')

plt.subplot(1,3,2);
sns.boxplot(y=df2['Parameter 9'],color='orange');
plt.title('Parameter 9')

plt.subplot(1,3,3);
sns.boxplot(y=df2['Parameter 11'],color='red');
plt.title('Parameter 11')
Out[16]:
Text(0.5, 1.0, 'Parameter 11')

The above plots presents the distribution and boxplots for few features in the given dataset

In [17]:
sns.scatterplot(x=df2['Parameter 3'],y=df2['Parameter 1'], hue=df2['Signal_Strength']);
In [18]:
sns.scatterplot(x=df2['Parameter 9'],y=df2['Parameter 1'], hue=df2['Signal_Strength']);
In [19]:
sns.scatterplot(x=df2['Parameter 11'],y=df2['Parameter 8'], hue=df2['Signal_Strength']);
In [20]:
sns.scatterplot(x=df2['Parameter 3'],y=df2['Parameter 9'], hue=df2['Signal_Strength']);

The above scatterplots shows relation between the parameters in given dataset

In [21]:
sns.stripplot(data=df2,y='Parameter 5',x='Signal_Strength');
In [22]:
sns.stripplot(data=df2,y='Parameter 6',x='Signal_Strength');

The strip plot shows that different parameters emits varying signals

In [23]:
sns.jointplot(data=df2,x='Parameter 3',y='Parameter 1',kind='hex');
In [24]:
sns.jointplot(data=df2,x='Parameter 6',y='Parameter 7',hue='Signal_Strength');

A few insights from the above plots:

  1. The parameters have +ve and -ve correlations with the other parameters as well as target variable
  2. A few parameters follow the normal distribution while others do not
  3. The Signal strength value of 5 ad 6 are more common than others.
In [25]:
sns.pairplot(data=df2, diag_kind='kde');  #Pair plot

Classification model¶

In [26]:
x=df2.drop('Signal_Strength',axis=1)
y=df2['Signal_Strength']
In [27]:
x_train, x_test, y_train, y_test = train_test_split(x,y, test_size=0.30, random_state=1)  #Split the data into train and test data of 70:30 ratio
print('X Train set contains {} data'.format(x_train.shape))
print('X Test set contains {} data'.format(x_test.shape))
print('Y Train set contains {} data'.format(y_train.shape))
print('Y Test set contains {} data'.format(y_test.shape))
X Train set contains (1119, 11) data
X Test set contains (480, 11) data
Y Train set contains (1119,) data
Y Test set contains (480,) data

The train and test data numbers are in sync

In [28]:
x_train=x_train.apply(zscore) #Normalize the data
x_test=x_test.apply(zscore)
In [29]:
num_classes = 10       #convert the target variable to one hot vectors
y_train_cat = to_categorical(y_train, num_classes)
y_test_cat=to_categorical(y_test,num_classes)
In [30]:
print("First 5 training lables as one-hot encoded vectors:\n", y_train_cat[:5])
First 5 training lables as one-hot encoded vectors:
 [[0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]
 [0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]
 [0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]
 [0. 0. 0. 0. 0. 0. 1. 0. 0. 0.]
 [0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]]
In [31]:
from keras import losses
from keras import optimizers
from keras.models import Sequential
from keras.layers import Dense,LeakyReLU

# create model
model = Sequential()

model.add(Dense(128, activation='relu',kernel_initializer='normal',input_shape=(11,))) ###Multiple Dense layers with Relu activation
model.add(Dense(64, activation='relu',kernel_initializer='normal'))
model.add(Dense(32, activation='relu',kernel_initializer='normal'))
model.add(LeakyReLU(alpha=0.1))
model.add(Dense(16, activation='relu',kernel_initializer='normal'))
model.add(LeakyReLU(alpha=0.1))

model.add(Dense(num_classes, activation='softmax')) ### For multiclass classification Softmax activation function is used
In [32]:
adam = optimizers.Adam(learning_rate=1e-3)
model.compile(loss='mean_absolute_error', optimizer=adam, metrics=['accuracy']) ### Loss function = MSE
In [33]:
model.summary() #Summary of the neural network model
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense (Dense)               (None, 128)               1536      
                                                                 
 dense_1 (Dense)             (None, 64)                8256      
                                                                 
 dense_2 (Dense)             (None, 32)                2080      
                                                                 
 leaky_re_lu (LeakyReLU)     (None, 32)                0         
                                                                 
 dense_3 (Dense)             (None, 16)                528       
                                                                 
 leaky_re_lu_1 (LeakyReLU)   (None, 16)                0         
                                                                 
 dense_4 (Dense)             (None, 10)                170       
                                                                 
=================================================================
Total params: 12,570
Trainable params: 12,570
Non-trainable params: 0
_________________________________________________________________
In [34]:
# Fit the model
history=model.fit(x_train, y_train_cat, validation_data=(x_test,y_test_cat), epochs=400, batch_size=200, verbose=2)
Epoch 1/400
6/6 - 4s - loss: 0.1798 - accuracy: 0.3816 - val_loss: 0.1796 - val_accuracy: 0.4042 - 4s/epoch - 623ms/step
Epoch 2/400
6/6 - 0s - loss: 0.1794 - accuracy: 0.3959 - val_loss: 0.1789 - val_accuracy: 0.4062 - 50ms/epoch - 8ms/step
Epoch 3/400
6/6 - 0s - loss: 0.1785 - accuracy: 0.3959 - val_loss: 0.1777 - val_accuracy: 0.4062 - 59ms/epoch - 10ms/step
Epoch 4/400
6/6 - 0s - loss: 0.1770 - accuracy: 0.3959 - val_loss: 0.1752 - val_accuracy: 0.4062 - 49ms/epoch - 8ms/step
Epoch 5/400
6/6 - 0s - loss: 0.1734 - accuracy: 0.3959 - val_loss: 0.1693 - val_accuracy: 0.4062 - 60ms/epoch - 10ms/step
Epoch 6/400
6/6 - 0s - loss: 0.1652 - accuracy: 0.3959 - val_loss: 0.1560 - val_accuracy: 0.4062 - 59ms/epoch - 10ms/step
Epoch 7/400
6/6 - 0s - loss: 0.1489 - accuracy: 0.3959 - val_loss: 0.1353 - val_accuracy: 0.4062 - 64ms/epoch - 11ms/step
Epoch 8/400
6/6 - 0s - loss: 0.1298 - accuracy: 0.3959 - val_loss: 0.1217 - val_accuracy: 0.4062 - 58ms/epoch - 10ms/step
Epoch 9/400
6/6 - 0s - loss: 0.1219 - accuracy: 0.3959 - val_loss: 0.1185 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 10/400
6/6 - 0s - loss: 0.1200 - accuracy: 0.3959 - val_loss: 0.1176 - val_accuracy: 0.4062 - 43ms/epoch - 7ms/step
Epoch 11/400
6/6 - 0s - loss: 0.1192 - accuracy: 0.3959 - val_loss: 0.1163 - val_accuracy: 0.4062 - 57ms/epoch - 10ms/step
Epoch 12/400
6/6 - 0s - loss: 0.1176 - accuracy: 0.3959 - val_loss: 0.1136 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 13/400
6/6 - 0s - loss: 0.1146 - accuracy: 0.3959 - val_loss: 0.1113 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 14/400
6/6 - 0s - loss: 0.1136 - accuracy: 0.4307 - val_loss: 0.1098 - val_accuracy: 0.4854 - 52ms/epoch - 9ms/step
Epoch 15/400
6/6 - 0s - loss: 0.1115 - accuracy: 0.4227 - val_loss: 0.1075 - val_accuracy: 0.4062 - 55ms/epoch - 9ms/step
Epoch 16/400
6/6 - 0s - loss: 0.1094 - accuracy: 0.4173 - val_loss: 0.1039 - val_accuracy: 0.5021 - 45ms/epoch - 7ms/step
Epoch 17/400
6/6 - 0s - loss: 0.1058 - accuracy: 0.5362 - val_loss: 0.0987 - val_accuracy: 0.6104 - 44ms/epoch - 7ms/step
Epoch 18/400
6/6 - 0s - loss: 0.1008 - accuracy: 0.5702 - val_loss: 0.0932 - val_accuracy: 0.6146 - 44ms/epoch - 7ms/step
Epoch 19/400
6/6 - 0s - loss: 0.0956 - accuracy: 0.5719 - val_loss: 0.0877 - val_accuracy: 0.5917 - 43ms/epoch - 7ms/step
Epoch 20/400
6/6 - 0s - loss: 0.0912 - accuracy: 0.5710 - val_loss: 0.0845 - val_accuracy: 0.6000 - 70ms/epoch - 12ms/step
Epoch 21/400
6/6 - 0s - loss: 0.0883 - accuracy: 0.5827 - val_loss: 0.0828 - val_accuracy: 0.6062 - 59ms/epoch - 10ms/step
Epoch 22/400
6/6 - 0s - loss: 0.0865 - accuracy: 0.5836 - val_loss: 0.0821 - val_accuracy: 0.6042 - 56ms/epoch - 9ms/step
Epoch 23/400
6/6 - 0s - loss: 0.0854 - accuracy: 0.5889 - val_loss: 0.0818 - val_accuracy: 0.5979 - 44ms/epoch - 7ms/step
Epoch 24/400
6/6 - 0s - loss: 0.0844 - accuracy: 0.5880 - val_loss: 0.0815 - val_accuracy: 0.5958 - 46ms/epoch - 8ms/step
Epoch 25/400
6/6 - 0s - loss: 0.0838 - accuracy: 0.5871 - val_loss: 0.0816 - val_accuracy: 0.5958 - 46ms/epoch - 8ms/step
Epoch 26/400
6/6 - 0s - loss: 0.0832 - accuracy: 0.5925 - val_loss: 0.0818 - val_accuracy: 0.5875 - 61ms/epoch - 10ms/step
Epoch 27/400
6/6 - 0s - loss: 0.0829 - accuracy: 0.5898 - val_loss: 0.0814 - val_accuracy: 0.5917 - 62ms/epoch - 10ms/step
Epoch 28/400
6/6 - 0s - loss: 0.0823 - accuracy: 0.5943 - val_loss: 0.0822 - val_accuracy: 0.5771 - 63ms/epoch - 10ms/step
Epoch 29/400
6/6 - 0s - loss: 0.0820 - accuracy: 0.5952 - val_loss: 0.0819 - val_accuracy: 0.5854 - 60ms/epoch - 10ms/step
Epoch 30/400
6/6 - 0s - loss: 0.0818 - accuracy: 0.6014 - val_loss: 0.0801 - val_accuracy: 0.6083 - 56ms/epoch - 9ms/step
Epoch 31/400
6/6 - 0s - loss: 0.0814 - accuracy: 0.5996 - val_loss: 0.0802 - val_accuracy: 0.5958 - 45ms/epoch - 8ms/step
Epoch 32/400
6/6 - 0s - loss: 0.0815 - accuracy: 0.5979 - val_loss: 0.0812 - val_accuracy: 0.5896 - 63ms/epoch - 10ms/step
Epoch 33/400
6/6 - 0s - loss: 0.0811 - accuracy: 0.5996 - val_loss: 0.0803 - val_accuracy: 0.6021 - 49ms/epoch - 8ms/step
Epoch 34/400
6/6 - 0s - loss: 0.0804 - accuracy: 0.6077 - val_loss: 0.0805 - val_accuracy: 0.5958 - 45ms/epoch - 8ms/step
Epoch 35/400
6/6 - 0s - loss: 0.0805 - accuracy: 0.6041 - val_loss: 0.0805 - val_accuracy: 0.5958 - 61ms/epoch - 10ms/step
Epoch 36/400
6/6 - 0s - loss: 0.0798 - accuracy: 0.6148 - val_loss: 0.0795 - val_accuracy: 0.6042 - 49ms/epoch - 8ms/step
Epoch 37/400
6/6 - 0s - loss: 0.0799 - accuracy: 0.6095 - val_loss: 0.0791 - val_accuracy: 0.6104 - 59ms/epoch - 10ms/step
Epoch 38/400
6/6 - 0s - loss: 0.0795 - accuracy: 0.6095 - val_loss: 0.0794 - val_accuracy: 0.6062 - 58ms/epoch - 10ms/step
Epoch 39/400
6/6 - 0s - loss: 0.0788 - accuracy: 0.6166 - val_loss: 0.0795 - val_accuracy: 0.6083 - 53ms/epoch - 9ms/step
Epoch 40/400
6/6 - 0s - loss: 0.0786 - accuracy: 0.6193 - val_loss: 0.0798 - val_accuracy: 0.6062 - 59ms/epoch - 10ms/step
Epoch 41/400
6/6 - 0s - loss: 0.0782 - accuracy: 0.6175 - val_loss: 0.0798 - val_accuracy: 0.6000 - 60ms/epoch - 10ms/step
Epoch 42/400
6/6 - 0s - loss: 0.0784 - accuracy: 0.6139 - val_loss: 0.0795 - val_accuracy: 0.6000 - 45ms/epoch - 8ms/step
Epoch 43/400
6/6 - 0s - loss: 0.0777 - accuracy: 0.6202 - val_loss: 0.0788 - val_accuracy: 0.6125 - 44ms/epoch - 7ms/step
Epoch 44/400
6/6 - 0s - loss: 0.0775 - accuracy: 0.6220 - val_loss: 0.0792 - val_accuracy: 0.6042 - 43ms/epoch - 7ms/step
Epoch 45/400
6/6 - 0s - loss: 0.0773 - accuracy: 0.6202 - val_loss: 0.0790 - val_accuracy: 0.6083 - 44ms/epoch - 7ms/step
Epoch 46/400
6/6 - 0s - loss: 0.0770 - accuracy: 0.6229 - val_loss: 0.0784 - val_accuracy: 0.6146 - 60ms/epoch - 10ms/step
Epoch 47/400
6/6 - 0s - loss: 0.0769 - accuracy: 0.6220 - val_loss: 0.0787 - val_accuracy: 0.6083 - 43ms/epoch - 7ms/step
Epoch 48/400
6/6 - 0s - loss: 0.0766 - accuracy: 0.6229 - val_loss: 0.0782 - val_accuracy: 0.6125 - 44ms/epoch - 7ms/step
Epoch 49/400
6/6 - 0s - loss: 0.0763 - accuracy: 0.6273 - val_loss: 0.0783 - val_accuracy: 0.6125 - 56ms/epoch - 9ms/step
Epoch 50/400
6/6 - 0s - loss: 0.0760 - accuracy: 0.6256 - val_loss: 0.0788 - val_accuracy: 0.6062 - 45ms/epoch - 7ms/step
Epoch 51/400
6/6 - 0s - loss: 0.0756 - accuracy: 0.6282 - val_loss: 0.0786 - val_accuracy: 0.6104 - 64ms/epoch - 11ms/step
Epoch 52/400
6/6 - 0s - loss: 0.0753 - accuracy: 0.6309 - val_loss: 0.0786 - val_accuracy: 0.6104 - 44ms/epoch - 7ms/step
Epoch 53/400
6/6 - 0s - loss: 0.0752 - accuracy: 0.6291 - val_loss: 0.0788 - val_accuracy: 0.6083 - 44ms/epoch - 7ms/step
Epoch 54/400
6/6 - 0s - loss: 0.0749 - accuracy: 0.6327 - val_loss: 0.0783 - val_accuracy: 0.6083 - 65ms/epoch - 11ms/step
Epoch 55/400
6/6 - 0s - loss: 0.0748 - accuracy: 0.6345 - val_loss: 0.0784 - val_accuracy: 0.6083 - 42ms/epoch - 7ms/step
Epoch 56/400
6/6 - 0s - loss: 0.0745 - accuracy: 0.6345 - val_loss: 0.0783 - val_accuracy: 0.6042 - 43ms/epoch - 7ms/step
Epoch 57/400
6/6 - 0s - loss: 0.0743 - accuracy: 0.6354 - val_loss: 0.0784 - val_accuracy: 0.6104 - 44ms/epoch - 7ms/step
Epoch 58/400
6/6 - 0s - loss: 0.0745 - accuracy: 0.6336 - val_loss: 0.0780 - val_accuracy: 0.6104 - 58ms/epoch - 10ms/step
Epoch 59/400
6/6 - 0s - loss: 0.0743 - accuracy: 0.6354 - val_loss: 0.0780 - val_accuracy: 0.6125 - 43ms/epoch - 7ms/step
Epoch 60/400
6/6 - 0s - loss: 0.0742 - accuracy: 0.6354 - val_loss: 0.0781 - val_accuracy: 0.6104 - 58ms/epoch - 10ms/step
Epoch 61/400
6/6 - 0s - loss: 0.0741 - accuracy: 0.6354 - val_loss: 0.0778 - val_accuracy: 0.6146 - 74ms/epoch - 12ms/step
Epoch 62/400
6/6 - 0s - loss: 0.0740 - accuracy: 0.6363 - val_loss: 0.0779 - val_accuracy: 0.6125 - 63ms/epoch - 11ms/step
Epoch 63/400
6/6 - 0s - loss: 0.0739 - accuracy: 0.6354 - val_loss: 0.0780 - val_accuracy: 0.6104 - 59ms/epoch - 10ms/step
Epoch 64/400
6/6 - 0s - loss: 0.0736 - accuracy: 0.6354 - val_loss: 0.0778 - val_accuracy: 0.6125 - 61ms/epoch - 10ms/step
Epoch 65/400
6/6 - 0s - loss: 0.0736 - accuracy: 0.6363 - val_loss: 0.0776 - val_accuracy: 0.6167 - 72ms/epoch - 12ms/step
Epoch 66/400
6/6 - 0s - loss: 0.0734 - accuracy: 0.6372 - val_loss: 0.0776 - val_accuracy: 0.6146 - 84ms/epoch - 14ms/step
Epoch 67/400
6/6 - 0s - loss: 0.0736 - accuracy: 0.6345 - val_loss: 0.0771 - val_accuracy: 0.6187 - 71ms/epoch - 12ms/step
Epoch 68/400
6/6 - 0s - loss: 0.0734 - accuracy: 0.6372 - val_loss: 0.0777 - val_accuracy: 0.6104 - 69ms/epoch - 11ms/step
Epoch 69/400
6/6 - 0s - loss: 0.0734 - accuracy: 0.6354 - val_loss: 0.0776 - val_accuracy: 0.6187 - 69ms/epoch - 12ms/step
Epoch 70/400
6/6 - 0s - loss: 0.0733 - accuracy: 0.6372 - val_loss: 0.0773 - val_accuracy: 0.6104 - 57ms/epoch - 10ms/step
Epoch 71/400
6/6 - 0s - loss: 0.0733 - accuracy: 0.6363 - val_loss: 0.0772 - val_accuracy: 0.6167 - 70ms/epoch - 12ms/step
Epoch 72/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6372 - val_loss: 0.0776 - val_accuracy: 0.6167 - 69ms/epoch - 12ms/step
Epoch 73/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6372 - val_loss: 0.0775 - val_accuracy: 0.6104 - 69ms/epoch - 11ms/step
Epoch 74/400
6/6 - 0s - loss: 0.0732 - accuracy: 0.6372 - val_loss: 0.0771 - val_accuracy: 0.6125 - 73ms/epoch - 12ms/step
Epoch 75/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6372 - val_loss: 0.0772 - val_accuracy: 0.6187 - 76ms/epoch - 13ms/step
Epoch 76/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6372 - val_loss: 0.0770 - val_accuracy: 0.6146 - 71ms/epoch - 12ms/step
Epoch 77/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6381 - val_loss: 0.0771 - val_accuracy: 0.6187 - 72ms/epoch - 12ms/step
Epoch 78/400
6/6 - 0s - loss: 0.0729 - accuracy: 0.6381 - val_loss: 0.0770 - val_accuracy: 0.6146 - 72ms/epoch - 12ms/step
Epoch 79/400
6/6 - 0s - loss: 0.0730 - accuracy: 0.6381 - val_loss: 0.0771 - val_accuracy: 0.6125 - 70ms/epoch - 12ms/step
Epoch 80/400
6/6 - 0s - loss: 0.0729 - accuracy: 0.6381 - val_loss: 0.0769 - val_accuracy: 0.6125 - 72ms/epoch - 12ms/step
Epoch 81/400
6/6 - 0s - loss: 0.0728 - accuracy: 0.6381 - val_loss: 0.0768 - val_accuracy: 0.6208 - 66ms/epoch - 11ms/step
Epoch 82/400
6/6 - 0s - loss: 0.0727 - accuracy: 0.6381 - val_loss: 0.0768 - val_accuracy: 0.6146 - 73ms/epoch - 12ms/step
Epoch 83/400
6/6 - 0s - loss: 0.0727 - accuracy: 0.6381 - val_loss: 0.0770 - val_accuracy: 0.6167 - 82ms/epoch - 14ms/step
Epoch 84/400
6/6 - 0s - loss: 0.0727 - accuracy: 0.6381 - val_loss: 0.0770 - val_accuracy: 0.6125 - 70ms/epoch - 12ms/step
Epoch 85/400
6/6 - 0s - loss: 0.0726 - accuracy: 0.6381 - val_loss: 0.0770 - val_accuracy: 0.6167 - 58ms/epoch - 10ms/step
Epoch 86/400
6/6 - 0s - loss: 0.0726 - accuracy: 0.6381 - val_loss: 0.0770 - val_accuracy: 0.6187 - 69ms/epoch - 12ms/step
Epoch 87/400
6/6 - 0s - loss: 0.0726 - accuracy: 0.6381 - val_loss: 0.0770 - val_accuracy: 0.6167 - 60ms/epoch - 10ms/step
Epoch 88/400
6/6 - 0s - loss: 0.0726 - accuracy: 0.6381 - val_loss: 0.0771 - val_accuracy: 0.6167 - 71ms/epoch - 12ms/step
Epoch 89/400
6/6 - 0s - loss: 0.0726 - accuracy: 0.6381 - val_loss: 0.0770 - val_accuracy: 0.6187 - 69ms/epoch - 11ms/step
Epoch 90/400
6/6 - 0s - loss: 0.0725 - accuracy: 0.6381 - val_loss: 0.0769 - val_accuracy: 0.6167 - 63ms/epoch - 10ms/step
Epoch 91/400
6/6 - 0s - loss: 0.0725 - accuracy: 0.6390 - val_loss: 0.0768 - val_accuracy: 0.6187 - 72ms/epoch - 12ms/step
Epoch 92/400
6/6 - 0s - loss: 0.0725 - accuracy: 0.6390 - val_loss: 0.0770 - val_accuracy: 0.6187 - 72ms/epoch - 12ms/step
Epoch 93/400
6/6 - 0s - loss: 0.0725 - accuracy: 0.6390 - val_loss: 0.0766 - val_accuracy: 0.6208 - 76ms/epoch - 13ms/step
Epoch 94/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6390 - val_loss: 0.0766 - val_accuracy: 0.6229 - 73ms/epoch - 12ms/step
Epoch 95/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6390 - val_loss: 0.0766 - val_accuracy: 0.6208 - 76ms/epoch - 13ms/step
Epoch 96/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6390 - val_loss: 0.0767 - val_accuracy: 0.6208 - 68ms/epoch - 11ms/step
Epoch 97/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6390 - val_loss: 0.0766 - val_accuracy: 0.6208 - 72ms/epoch - 12ms/step
Epoch 98/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6390 - val_loss: 0.0767 - val_accuracy: 0.6187 - 68ms/epoch - 11ms/step
Epoch 99/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6390 - val_loss: 0.0765 - val_accuracy: 0.6187 - 77ms/epoch - 13ms/step
Epoch 100/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6390 - val_loss: 0.0766 - val_accuracy: 0.6187 - 66ms/epoch - 11ms/step
Epoch 101/400
6/6 - 0s - loss: 0.0723 - accuracy: 0.6390 - val_loss: 0.0768 - val_accuracy: 0.6167 - 78ms/epoch - 13ms/step
Epoch 102/400
6/6 - 0s - loss: 0.0723 - accuracy: 0.6390 - val_loss: 0.0766 - val_accuracy: 0.6208 - 72ms/epoch - 12ms/step
Epoch 103/400
6/6 - 0s - loss: 0.0723 - accuracy: 0.6390 - val_loss: 0.0766 - val_accuracy: 0.6208 - 59ms/epoch - 10ms/step
Epoch 104/400
6/6 - 0s - loss: 0.0723 - accuracy: 0.6390 - val_loss: 0.0767 - val_accuracy: 0.6187 - 70ms/epoch - 12ms/step
Epoch 105/400
6/6 - 0s - loss: 0.0723 - accuracy: 0.6390 - val_loss: 0.0766 - val_accuracy: 0.6187 - 73ms/epoch - 12ms/step
Epoch 106/400
6/6 - 0s - loss: 0.0722 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 75ms/epoch - 12ms/step
Epoch 107/400
6/6 - 0s - loss: 0.0722 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6167 - 47ms/epoch - 8ms/step
Epoch 108/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6146 - 44ms/epoch - 7ms/step
Epoch 109/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6167 - 43ms/epoch - 7ms/step
Epoch 110/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 57ms/epoch - 9ms/step
Epoch 111/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 112/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 58ms/epoch - 10ms/step
Epoch 113/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 45ms/epoch - 7ms/step
Epoch 114/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0772 - val_accuracy: 0.6146 - 43ms/epoch - 7ms/step
Epoch 115/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 42ms/epoch - 7ms/step
Epoch 116/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 117/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 49ms/epoch - 8ms/step
Epoch 118/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 59ms/epoch - 10ms/step
Epoch 119/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 44ms/epoch - 7ms/step
Epoch 120/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 121/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 43ms/epoch - 7ms/step
Epoch 122/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 43ms/epoch - 7ms/step
Epoch 123/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 57ms/epoch - 9ms/step
Epoch 124/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 56ms/epoch - 9ms/step
Epoch 125/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0771 - val_accuracy: 0.6146 - 47ms/epoch - 8ms/step
Epoch 126/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 127/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0767 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 128/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 43ms/epoch - 7ms/step
Epoch 129/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 51ms/epoch - 8ms/step
Epoch 130/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0767 - val_accuracy: 0.6187 - 59ms/epoch - 10ms/step
Epoch 131/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 48ms/epoch - 8ms/step
Epoch 132/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 60ms/epoch - 10ms/step
Epoch 133/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6167 - 57ms/epoch - 9ms/step
Epoch 134/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 62ms/epoch - 10ms/step
Epoch 135/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 52ms/epoch - 9ms/step
Epoch 136/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 46ms/epoch - 8ms/step
Epoch 137/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6167 - 46ms/epoch - 8ms/step
Epoch 138/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 47ms/epoch - 8ms/step
Epoch 139/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6167 - 59ms/epoch - 10ms/step
Epoch 140/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 47ms/epoch - 8ms/step
Epoch 141/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 142/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 47ms/epoch - 8ms/step
Epoch 143/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 58ms/epoch - 10ms/step
Epoch 144/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 44ms/epoch - 7ms/step
Epoch 145/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 44ms/epoch - 7ms/step
Epoch 146/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 57ms/epoch - 9ms/step
Epoch 147/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0767 - val_accuracy: 0.6187 - 57ms/epoch - 9ms/step
Epoch 148/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 58ms/epoch - 10ms/step
Epoch 149/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 43ms/epoch - 7ms/step
Epoch 150/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 151/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 43ms/epoch - 7ms/step
Epoch 152/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 153/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 57ms/epoch - 9ms/step
Epoch 154/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 47ms/epoch - 8ms/step
Epoch 155/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 43ms/epoch - 7ms/step
Epoch 156/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 59ms/epoch - 10ms/step
Epoch 157/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 158/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 159/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 160/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 43ms/epoch - 7ms/step
Epoch 161/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 47ms/epoch - 8ms/step
Epoch 162/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 59ms/epoch - 10ms/step
Epoch 163/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 57ms/epoch - 9ms/step
Epoch 164/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6167 - 60ms/epoch - 10ms/step
Epoch 165/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6167 - 60ms/epoch - 10ms/step
Epoch 166/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 67ms/epoch - 11ms/step
Epoch 167/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 44ms/epoch - 7ms/step
Epoch 168/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 169/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 63ms/epoch - 11ms/step
Epoch 170/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 171/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 56ms/epoch - 9ms/step
Epoch 172/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 57ms/epoch - 10ms/step
Epoch 173/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 46ms/epoch - 8ms/step
Epoch 174/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6167 - 45ms/epoch - 7ms/step
Epoch 175/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 46ms/epoch - 8ms/step
Epoch 176/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 45ms/epoch - 7ms/step
Epoch 177/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 178/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 43ms/epoch - 7ms/step
Epoch 179/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 44ms/epoch - 7ms/step
Epoch 180/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 44ms/epoch - 7ms/step
Epoch 181/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 42ms/epoch - 7ms/step
Epoch 182/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 58ms/epoch - 10ms/step
Epoch 183/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 64ms/epoch - 11ms/step
Epoch 184/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 59ms/epoch - 10ms/step
Epoch 185/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 57ms/epoch - 9ms/step
Epoch 186/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 43ms/epoch - 7ms/step
Epoch 187/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 47ms/epoch - 8ms/step
Epoch 188/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 57ms/epoch - 10ms/step
Epoch 189/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 190/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 45ms/epoch - 8ms/step
Epoch 191/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 48ms/epoch - 8ms/step
Epoch 192/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 57ms/epoch - 9ms/step
Epoch 193/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 194/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 61ms/epoch - 10ms/step
Epoch 195/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 44ms/epoch - 7ms/step
Epoch 196/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 45ms/epoch - 7ms/step
Epoch 197/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 45ms/epoch - 7ms/step
Epoch 198/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 199/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 66ms/epoch - 11ms/step
Epoch 200/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 201/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 43ms/epoch - 7ms/step
Epoch 202/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 43ms/epoch - 7ms/step
Epoch 203/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 61ms/epoch - 10ms/step
Epoch 204/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 45ms/epoch - 7ms/step
Epoch 205/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 62ms/epoch - 10ms/step
Epoch 206/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 59ms/epoch - 10ms/step
Epoch 207/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 208/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 60ms/epoch - 10ms/step
Epoch 209/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 43ms/epoch - 7ms/step
Epoch 210/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 57ms/epoch - 9ms/step
Epoch 211/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 212/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 57ms/epoch - 10ms/step
Epoch 213/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 59ms/epoch - 10ms/step
Epoch 214/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 65ms/epoch - 11ms/step
Epoch 215/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 59ms/epoch - 10ms/step
Epoch 216/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 217/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 43ms/epoch - 7ms/step
Epoch 218/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 45ms/epoch - 7ms/step
Epoch 219/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 42ms/epoch - 7ms/step
Epoch 220/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 43ms/epoch - 7ms/step
Epoch 221/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 68ms/epoch - 11ms/step
Epoch 222/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 42ms/epoch - 7ms/step
Epoch 223/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 42ms/epoch - 7ms/step
Epoch 224/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 56ms/epoch - 9ms/step
Epoch 225/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 42ms/epoch - 7ms/step
Epoch 226/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 70ms/epoch - 12ms/step
Epoch 227/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 58ms/epoch - 10ms/step
Epoch 228/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 49ms/epoch - 8ms/step
Epoch 229/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 57ms/epoch - 9ms/step
Epoch 230/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 41ms/epoch - 7ms/step
Epoch 231/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 55ms/epoch - 9ms/step
Epoch 232/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 45ms/epoch - 7ms/step
Epoch 233/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 46ms/epoch - 8ms/step
Epoch 234/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 44ms/epoch - 7ms/step
Epoch 235/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 61ms/epoch - 10ms/step
Epoch 236/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 44ms/epoch - 7ms/step
Epoch 237/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 58ms/epoch - 10ms/step
Epoch 238/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 46ms/epoch - 8ms/step
Epoch 239/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 60ms/epoch - 10ms/step
Epoch 240/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 45ms/epoch - 7ms/step
Epoch 241/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 58ms/epoch - 10ms/step
Epoch 242/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 59ms/epoch - 10ms/step
Epoch 243/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 45ms/epoch - 8ms/step
Epoch 244/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 59ms/epoch - 10ms/step
Epoch 245/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 42ms/epoch - 7ms/step
Epoch 246/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 43ms/epoch - 7ms/step
Epoch 247/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 56ms/epoch - 9ms/step
Epoch 248/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 45ms/epoch - 7ms/step
Epoch 249/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 45ms/epoch - 8ms/step
Epoch 250/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 53ms/epoch - 9ms/step
Epoch 251/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 60ms/epoch - 10ms/step
Epoch 252/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 253/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 58ms/epoch - 10ms/step
Epoch 254/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 58ms/epoch - 10ms/step
Epoch 255/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 43ms/epoch - 7ms/step
Epoch 256/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 47ms/epoch - 8ms/step
Epoch 257/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 57ms/epoch - 10ms/step
Epoch 258/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 259/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 59ms/epoch - 10ms/step
Epoch 260/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 42ms/epoch - 7ms/step
Epoch 261/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 42ms/epoch - 7ms/step
Epoch 262/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 60ms/epoch - 10ms/step
Epoch 263/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 49ms/epoch - 8ms/step
Epoch 264/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 49ms/epoch - 8ms/step
Epoch 265/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 46ms/epoch - 8ms/step
Epoch 266/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 56ms/epoch - 9ms/step
Epoch 267/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 268/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 269/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6146 - 59ms/epoch - 10ms/step
Epoch 270/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6146 - 45ms/epoch - 7ms/step
Epoch 271/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 58ms/epoch - 10ms/step
Epoch 272/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 50ms/epoch - 8ms/step
Epoch 273/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 58ms/epoch - 10ms/step
Epoch 274/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 43ms/epoch - 7ms/step
Epoch 275/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 43ms/epoch - 7ms/step
Epoch 276/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 51ms/epoch - 9ms/step
Epoch 277/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 42ms/epoch - 7ms/step
Epoch 278/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 57ms/epoch - 9ms/step
Epoch 279/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 45ms/epoch - 8ms/step
Epoch 280/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 45ms/epoch - 8ms/step
Epoch 281/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 44ms/epoch - 7ms/step
Epoch 282/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 59ms/epoch - 10ms/step
Epoch 283/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 42ms/epoch - 7ms/step
Epoch 284/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 56ms/epoch - 9ms/step
Epoch 285/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 57ms/epoch - 9ms/step
Epoch 286/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 45ms/epoch - 7ms/step
Epoch 287/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 73ms/epoch - 12ms/step
Epoch 288/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 60ms/epoch - 10ms/step
Epoch 289/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 63ms/epoch - 10ms/step
Epoch 290/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6125 - 61ms/epoch - 10ms/step
Epoch 291/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 72ms/epoch - 12ms/step
Epoch 292/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 72ms/epoch - 12ms/step
Epoch 293/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 68ms/epoch - 11ms/step
Epoch 294/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 71ms/epoch - 12ms/step
Epoch 295/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 72ms/epoch - 12ms/step
Epoch 296/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 69ms/epoch - 12ms/step
Epoch 297/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 70ms/epoch - 12ms/step
Epoch 298/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6146 - 71ms/epoch - 12ms/step
Epoch 299/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 70ms/epoch - 12ms/step
Epoch 300/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 70ms/epoch - 12ms/step
Epoch 301/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 72ms/epoch - 12ms/step
Epoch 302/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 58ms/epoch - 10ms/step
Epoch 303/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6146 - 77ms/epoch - 13ms/step
Epoch 304/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6187 - 71ms/epoch - 12ms/step
Epoch 305/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 70ms/epoch - 12ms/step
Epoch 306/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 69ms/epoch - 12ms/step
Epoch 307/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 57ms/epoch - 9ms/step
Epoch 308/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 68ms/epoch - 11ms/step
Epoch 309/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6146 - 67ms/epoch - 11ms/step
Epoch 310/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 77ms/epoch - 13ms/step
Epoch 311/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6146 - 56ms/epoch - 9ms/step
Epoch 312/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6146 - 68ms/epoch - 11ms/step
Epoch 313/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6146 - 70ms/epoch - 12ms/step
Epoch 314/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 71ms/epoch - 12ms/step
Epoch 315/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 74ms/epoch - 12ms/step
Epoch 316/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 70ms/epoch - 12ms/step
Epoch 317/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 74ms/epoch - 12ms/step
Epoch 318/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 70ms/epoch - 12ms/step
Epoch 319/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 65ms/epoch - 11ms/step
Epoch 320/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 62ms/epoch - 10ms/step
Epoch 321/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 70ms/epoch - 12ms/step
Epoch 322/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6167 - 72ms/epoch - 12ms/step
Epoch 323/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 75ms/epoch - 12ms/step
Epoch 324/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 72ms/epoch - 12ms/step
Epoch 325/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6146 - 71ms/epoch - 12ms/step
Epoch 326/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 64ms/epoch - 11ms/step
Epoch 327/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 72ms/epoch - 12ms/step
Epoch 328/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 71ms/epoch - 12ms/step
Epoch 329/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6146 - 72ms/epoch - 12ms/step
Epoch 330/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6146 - 68ms/epoch - 11ms/step
Epoch 331/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 80ms/epoch - 13ms/step
Epoch 332/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 75ms/epoch - 13ms/step
Epoch 333/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0769 - val_accuracy: 0.6146 - 73ms/epoch - 12ms/step
Epoch 334/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6125 - 76ms/epoch - 13ms/step
Epoch 335/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0770 - val_accuracy: 0.6125 - 75ms/epoch - 13ms/step
Epoch 336/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6399 - val_loss: 0.0768 - val_accuracy: 0.6146 - 48ms/epoch - 8ms/step
Epoch 337/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0784 - val_accuracy: 0.6104 - 51ms/epoch - 9ms/step
Epoch 338/400
6/6 - 0s - loss: 0.0723 - accuracy: 0.6381 - val_loss: 0.0769 - val_accuracy: 0.6167 - 45ms/epoch - 7ms/step
Epoch 339/400
6/6 - 0s - loss: 0.0732 - accuracy: 0.6336 - val_loss: 0.0774 - val_accuracy: 0.6146 - 44ms/epoch - 7ms/step
Epoch 340/400
6/6 - 0s - loss: 0.0741 - accuracy: 0.6291 - val_loss: 0.0764 - val_accuracy: 0.6187 - 43ms/epoch - 7ms/step
Epoch 341/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6345 - val_loss: 0.0775 - val_accuracy: 0.6125 - 43ms/epoch - 7ms/step
Epoch 342/400
6/6 - 0s - loss: 0.0741 - accuracy: 0.6291 - val_loss: 0.0773 - val_accuracy: 0.6125 - 57ms/epoch - 9ms/step
Epoch 343/400
6/6 - 0s - loss: 0.0743 - accuracy: 0.6291 - val_loss: 0.0779 - val_accuracy: 0.6083 - 47ms/epoch - 8ms/step
Epoch 344/400
6/6 - 0s - loss: 0.0728 - accuracy: 0.6372 - val_loss: 0.0793 - val_accuracy: 0.6042 - 46ms/epoch - 8ms/step
Epoch 345/400
6/6 - 0s - loss: 0.0752 - accuracy: 0.6256 - val_loss: 0.0790 - val_accuracy: 0.6062 - 61ms/epoch - 10ms/step
Epoch 346/400
6/6 - 0s - loss: 0.0757 - accuracy: 0.6220 - val_loss: 0.0772 - val_accuracy: 0.6146 - 64ms/epoch - 11ms/step
Epoch 347/400
6/6 - 0s - loss: 0.0776 - accuracy: 0.6122 - val_loss: 0.0775 - val_accuracy: 0.6146 - 56ms/epoch - 9ms/step
Epoch 348/400
6/6 - 0s - loss: 0.0779 - accuracy: 0.6113 - val_loss: 0.0789 - val_accuracy: 0.6021 - 44ms/epoch - 7ms/step
Epoch 349/400
6/6 - 0s - loss: 0.0783 - accuracy: 0.6095 - val_loss: 0.0800 - val_accuracy: 0.5979 - 57ms/epoch - 10ms/step
Epoch 350/400
6/6 - 0s - loss: 0.0779 - accuracy: 0.6113 - val_loss: 0.0787 - val_accuracy: 0.6083 - 41ms/epoch - 7ms/step
Epoch 351/400
6/6 - 0s - loss: 0.0772 - accuracy: 0.6130 - val_loss: 0.0789 - val_accuracy: 0.6042 - 60ms/epoch - 10ms/step
Epoch 352/400
6/6 - 0s - loss: 0.0771 - accuracy: 0.6157 - val_loss: 0.0791 - val_accuracy: 0.6042 - 43ms/epoch - 7ms/step
Epoch 353/400
6/6 - 0s - loss: 0.0763 - accuracy: 0.6193 - val_loss: 0.0801 - val_accuracy: 0.5979 - 54ms/epoch - 9ms/step
Epoch 354/400
6/6 - 0s - loss: 0.0761 - accuracy: 0.6193 - val_loss: 0.0776 - val_accuracy: 0.6104 - 42ms/epoch - 7ms/step
Epoch 355/400
6/6 - 0s - loss: 0.0759 - accuracy: 0.6211 - val_loss: 0.0771 - val_accuracy: 0.6146 - 49ms/epoch - 8ms/step
Epoch 356/400
6/6 - 0s - loss: 0.0760 - accuracy: 0.6202 - val_loss: 0.0789 - val_accuracy: 0.6042 - 42ms/epoch - 7ms/step
Epoch 357/400
6/6 - 0s - loss: 0.0750 - accuracy: 0.6256 - val_loss: 0.0770 - val_accuracy: 0.6125 - 44ms/epoch - 7ms/step
Epoch 358/400
6/6 - 0s - loss: 0.0746 - accuracy: 0.6282 - val_loss: 0.0757 - val_accuracy: 0.6208 - 45ms/epoch - 7ms/step
Epoch 359/400
6/6 - 0s - loss: 0.0741 - accuracy: 0.6300 - val_loss: 0.0759 - val_accuracy: 0.6229 - 44ms/epoch - 7ms/step
Epoch 360/400
6/6 - 0s - loss: 0.0737 - accuracy: 0.6336 - val_loss: 0.0774 - val_accuracy: 0.6146 - 43ms/epoch - 7ms/step
Epoch 361/400
6/6 - 0s - loss: 0.0730 - accuracy: 0.6354 - val_loss: 0.0769 - val_accuracy: 0.6187 - 57ms/epoch - 10ms/step
Epoch 362/400
6/6 - 0s - loss: 0.0728 - accuracy: 0.6381 - val_loss: 0.0772 - val_accuracy: 0.6146 - 46ms/epoch - 8ms/step
Epoch 363/400
6/6 - 0s - loss: 0.0730 - accuracy: 0.6345 - val_loss: 0.0775 - val_accuracy: 0.6125 - 44ms/epoch - 7ms/step
Epoch 364/400
6/6 - 0s - loss: 0.0735 - accuracy: 0.6327 - val_loss: 0.0776 - val_accuracy: 0.6125 - 59ms/epoch - 10ms/step
Epoch 365/400
6/6 - 0s - loss: 0.0722 - accuracy: 0.6399 - val_loss: 0.0772 - val_accuracy: 0.6146 - 59ms/epoch - 10ms/step
Epoch 366/400
6/6 - 0s - loss: 0.0714 - accuracy: 0.6434 - val_loss: 0.0790 - val_accuracy: 0.6042 - 59ms/epoch - 10ms/step
Epoch 367/400
6/6 - 0s - loss: 0.0712 - accuracy: 0.6452 - val_loss: 0.0777 - val_accuracy: 0.6125 - 66ms/epoch - 11ms/step
Epoch 368/400
6/6 - 0s - loss: 0.0710 - accuracy: 0.6452 - val_loss: 0.0771 - val_accuracy: 0.6146 - 45ms/epoch - 7ms/step
Epoch 369/400
6/6 - 0s - loss: 0.0709 - accuracy: 0.6461 - val_loss: 0.0775 - val_accuracy: 0.6146 - 59ms/epoch - 10ms/step
Epoch 370/400
6/6 - 0s - loss: 0.0707 - accuracy: 0.6470 - val_loss: 0.0782 - val_accuracy: 0.6104 - 45ms/epoch - 8ms/step
Epoch 371/400
6/6 - 0s - loss: 0.0707 - accuracy: 0.6470 - val_loss: 0.0774 - val_accuracy: 0.6125 - 55ms/epoch - 9ms/step
Epoch 372/400
6/6 - 0s - loss: 0.0709 - accuracy: 0.6461 - val_loss: 0.0774 - val_accuracy: 0.6146 - 57ms/epoch - 9ms/step
Epoch 373/400
6/6 - 0s - loss: 0.0708 - accuracy: 0.6461 - val_loss: 0.0772 - val_accuracy: 0.6125 - 60ms/epoch - 10ms/step
Epoch 374/400
6/6 - 0s - loss: 0.0706 - accuracy: 0.6470 - val_loss: 0.0776 - val_accuracy: 0.6125 - 59ms/epoch - 10ms/step
Epoch 375/400
6/6 - 0s - loss: 0.0707 - accuracy: 0.6470 - val_loss: 0.0777 - val_accuracy: 0.6125 - 45ms/epoch - 8ms/step
Epoch 376/400
6/6 - 0s - loss: 0.0706 - accuracy: 0.6470 - val_loss: 0.0778 - val_accuracy: 0.6104 - 61ms/epoch - 10ms/step
Epoch 377/400
6/6 - 0s - loss: 0.0706 - accuracy: 0.6470 - val_loss: 0.0778 - val_accuracy: 0.6083 - 44ms/epoch - 7ms/step
Epoch 378/400
6/6 - 0s - loss: 0.0706 - accuracy: 0.6470 - val_loss: 0.0780 - val_accuracy: 0.6083 - 51ms/epoch - 8ms/step
Epoch 379/400
6/6 - 0s - loss: 0.0706 - accuracy: 0.6470 - val_loss: 0.0779 - val_accuracy: 0.6083 - 46ms/epoch - 8ms/step
Epoch 380/400
6/6 - 0s - loss: 0.0706 - accuracy: 0.6470 - val_loss: 0.0778 - val_accuracy: 0.6125 - 44ms/epoch - 7ms/step
Epoch 381/400
6/6 - 0s - loss: 0.0705 - accuracy: 0.6479 - val_loss: 0.0777 - val_accuracy: 0.6125 - 46ms/epoch - 8ms/step
Epoch 382/400
6/6 - 0s - loss: 0.0705 - accuracy: 0.6479 - val_loss: 0.0777 - val_accuracy: 0.6083 - 59ms/epoch - 10ms/step
Epoch 383/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0776 - val_accuracy: 0.6125 - 44ms/epoch - 7ms/step
Epoch 384/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0775 - val_accuracy: 0.6125 - 44ms/epoch - 7ms/step
Epoch 385/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0776 - val_accuracy: 0.6125 - 43ms/epoch - 7ms/step
Epoch 386/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0776 - val_accuracy: 0.6125 - 49ms/epoch - 8ms/step
Epoch 387/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0777 - val_accuracy: 0.6104 - 56ms/epoch - 9ms/step
Epoch 388/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0777 - val_accuracy: 0.6083 - 43ms/epoch - 7ms/step
Epoch 389/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0777 - val_accuracy: 0.6083 - 56ms/epoch - 9ms/step
Epoch 390/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0777 - val_accuracy: 0.6083 - 51ms/epoch - 8ms/step
Epoch 391/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0778 - val_accuracy: 0.6083 - 42ms/epoch - 7ms/step
Epoch 392/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0778 - val_accuracy: 0.6104 - 56ms/epoch - 9ms/step
Epoch 393/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0778 - val_accuracy: 0.6104 - 45ms/epoch - 8ms/step
Epoch 394/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0778 - val_accuracy: 0.6083 - 42ms/epoch - 7ms/step
Epoch 395/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0778 - val_accuracy: 0.6083 - 59ms/epoch - 10ms/step
Epoch 396/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0778 - val_accuracy: 0.6083 - 43ms/epoch - 7ms/step
Epoch 397/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0778 - val_accuracy: 0.6083 - 46ms/epoch - 8ms/step
Epoch 398/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0778 - val_accuracy: 0.6083 - 58ms/epoch - 10ms/step
Epoch 399/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0778 - val_accuracy: 0.6083 - 43ms/epoch - 7ms/step
Epoch 400/400
6/6 - 0s - loss: 0.0704 - accuracy: 0.6479 - val_loss: 0.0779 - val_accuracy: 0.6083 - 44ms/epoch - 7ms/step
In [35]:
# predicting the model on test data
y_pred=model.predict(x_test)
15/15 [==============================] - 0s 2ms/step
In [36]:
y_pred[0]
Out[36]:
array([7.5059706e-20, 4.5178044e-15, 1.4451022e-21, 1.1218585e-22,
       1.0324295e-13, 2.0835235e-11, 1.0000000e+00, 7.0103475e-16,
       8.8083591e-15, 2.2171640e-18], dtype=float32)
In [37]:
# Since the outputs are in probabilities we try to get the label
y_pred_final=[]
for i in y_pred:
  y_pred_final.append(np.argmax(i))
In [38]:
from sklearn.metrics import classification_report
print(classification_report(y_test,y_pred_final))
              precision    recall  f1-score   support

           3       0.00      0.00      0.00         2
           4       0.00      0.00      0.00        21
           5       0.68      0.73      0.71       207
           6       0.54      0.72      0.62       195
           7       0.00      0.00      0.00        52
           8       0.00      0.00      0.00         3

    accuracy                           0.61       480
   macro avg       0.20      0.24      0.22       480
weighted avg       0.52      0.61      0.56       480

In [39]:
from sklearn.metrics import confusion_matrix             #Confusion matrix
import seaborn as sns

cm=confusion_matrix(y_test,y_pred_final)

plt.figure(figsize=(10,7))
sns.heatmap(cm,annot=True,fmt='d')
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
In [40]:
import matplotlib.pyplot as plt           #plotting the accuracy and losses after each iteration


def plot_graphs(history, string):
  plt.plot(history.history[string])
  plt.plot(history.history['val_'+string])
  plt.xlabel("Epochs")
  plt.ylabel(string)
  plt.legend([string, 'val_'+string])
  plt.show()

plot_graphs(history, 'accuracy')
plot_graphs(history, 'loss')

Implementing dropout in the model¶

In [41]:
from keras import losses
from keras import optimizers
from keras.models import Sequential
from keras.layers import Dense,LeakyReLU,Dropout,BatchNormalization

# create model
model = Sequential()

model.add(Dense(128, activation='relu',kernel_initializer='normal',input_shape=(11,))) ###Multiple Dense layers with Relu activation
model.add(Dense(64, activation='relu',kernel_initializer='normal'))
model.add(Dropout(0.5))
model.add(Dense(32, activation='relu',kernel_initializer='normal'))
model.add(LeakyReLU(alpha=0.1))
model.add(Dropout(0.5))
model.add(Dense(16, activation='relu',kernel_initializer='normal'))
model.add(LeakyReLU(alpha=0.1))
model.add(Dropout(0.5))

model.add(Dense(num_classes, activation='softmax')) ### For multiclass classification Softmax activation function is used
In [42]:
adam = optimizers.Adam(learning_rate=1e-3)
model.compile(loss='mean_absolute_error', optimizer=adam, metrics=['accuracy']) ### Loss function = MSE
In [43]:
model.summary() #Summary of the neural network model
Model: "sequential_1"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense_5 (Dense)             (None, 128)               1536      
                                                                 
 dense_6 (Dense)             (None, 64)                8256      
                                                                 
 dropout (Dropout)           (None, 64)                0         
                                                                 
 dense_7 (Dense)             (None, 32)                2080      
                                                                 
 leaky_re_lu_2 (LeakyReLU)   (None, 32)                0         
                                                                 
 dropout_1 (Dropout)         (None, 32)                0         
                                                                 
 dense_8 (Dense)             (None, 16)                528       
                                                                 
 leaky_re_lu_3 (LeakyReLU)   (None, 16)                0         
                                                                 
 dropout_2 (Dropout)         (None, 16)                0         
                                                                 
 dense_9 (Dense)             (None, 10)                170       
                                                                 
=================================================================
Total params: 12,570
Trainable params: 12,570
Non-trainable params: 0
_________________________________________________________________
In [44]:
# Fit the model
history=model.fit(x_train, y_train_cat, validation_data=(x_test,y_test_cat), epochs=400, batch_size=200, verbose=2)
Epoch 1/400
6/6 - 1s - loss: 0.1800 - accuracy: 0.1984 - val_loss: 0.1798 - val_accuracy: 0.4062 - 754ms/epoch - 126ms/step
Epoch 2/400
6/6 - 0s - loss: 0.1797 - accuracy: 0.3503 - val_loss: 0.1796 - val_accuracy: 0.4062 - 45ms/epoch - 8ms/step
Epoch 3/400
6/6 - 0s - loss: 0.1795 - accuracy: 0.3244 - val_loss: 0.1793 - val_accuracy: 0.4062 - 45ms/epoch - 8ms/step
Epoch 4/400
6/6 - 0s - loss: 0.1792 - accuracy: 0.3467 - val_loss: 0.1790 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 5/400
6/6 - 0s - loss: 0.1788 - accuracy: 0.3315 - val_loss: 0.1784 - val_accuracy: 0.4062 - 43ms/epoch - 7ms/step
Epoch 6/400
6/6 - 0s - loss: 0.1781 - accuracy: 0.3441 - val_loss: 0.1774 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 7/400
6/6 - 0s - loss: 0.1766 - accuracy: 0.3378 - val_loss: 0.1750 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 8/400
6/6 - 0s - loss: 0.1737 - accuracy: 0.3146 - val_loss: 0.1695 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 9/400
6/6 - 0s - loss: 0.1663 - accuracy: 0.3485 - val_loss: 0.1574 - val_accuracy: 0.4062 - 58ms/epoch - 10ms/step
Epoch 10/400
6/6 - 0s - loss: 0.1571 - accuracy: 0.3199 - val_loss: 0.1385 - val_accuracy: 0.4062 - 58ms/epoch - 10ms/step
Epoch 11/400
6/6 - 0s - loss: 0.1462 - accuracy: 0.3458 - val_loss: 0.1249 - val_accuracy: 0.4062 - 58ms/epoch - 10ms/step
Epoch 12/400
6/6 - 0s - loss: 0.1390 - accuracy: 0.3441 - val_loss: 0.1204 - val_accuracy: 0.4062 - 55ms/epoch - 9ms/step
Epoch 13/400
6/6 - 0s - loss: 0.1339 - accuracy: 0.3476 - val_loss: 0.1192 - val_accuracy: 0.4062 - 63ms/epoch - 10ms/step
Epoch 14/400
6/6 - 0s - loss: 0.1289 - accuracy: 0.3709 - val_loss: 0.1189 - val_accuracy: 0.4062 - 59ms/epoch - 10ms/step
Epoch 15/400
6/6 - 0s - loss: 0.1257 - accuracy: 0.3825 - val_loss: 0.1188 - val_accuracy: 0.4062 - 62ms/epoch - 10ms/step
Epoch 16/400
6/6 - 0s - loss: 0.1237 - accuracy: 0.3896 - val_loss: 0.1188 - val_accuracy: 0.4062 - 58ms/epoch - 10ms/step
Epoch 17/400
6/6 - 0s - loss: 0.1243 - accuracy: 0.3843 - val_loss: 0.1188 - val_accuracy: 0.4062 - 46ms/epoch - 8ms/step
Epoch 18/400
6/6 - 0s - loss: 0.1240 - accuracy: 0.3807 - val_loss: 0.1188 - val_accuracy: 0.4062 - 45ms/epoch - 8ms/step
Epoch 19/400
6/6 - 0s - loss: 0.1227 - accuracy: 0.3896 - val_loss: 0.1188 - val_accuracy: 0.4062 - 45ms/epoch - 7ms/step
Epoch 20/400
6/6 - 0s - loss: 0.1219 - accuracy: 0.3896 - val_loss: 0.1188 - val_accuracy: 0.4062 - 53ms/epoch - 9ms/step
Epoch 21/400
6/6 - 0s - loss: 0.1221 - accuracy: 0.3950 - val_loss: 0.1188 - val_accuracy: 0.4062 - 58ms/epoch - 10ms/step
Epoch 22/400
6/6 - 0s - loss: 0.1230 - accuracy: 0.3878 - val_loss: 0.1188 - val_accuracy: 0.4062 - 45ms/epoch - 7ms/step
Epoch 23/400
6/6 - 0s - loss: 0.1228 - accuracy: 0.3896 - val_loss: 0.1188 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 24/400
6/6 - 0s - loss: 0.1219 - accuracy: 0.3914 - val_loss: 0.1188 - val_accuracy: 0.4062 - 43ms/epoch - 7ms/step
Epoch 25/400
6/6 - 0s - loss: 0.1235 - accuracy: 0.3843 - val_loss: 0.1188 - val_accuracy: 0.4062 - 47ms/epoch - 8ms/step
Epoch 26/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3959 - val_loss: 0.1188 - val_accuracy: 0.4062 - 43ms/epoch - 7ms/step
Epoch 27/400
6/6 - 0s - loss: 0.1216 - accuracy: 0.3941 - val_loss: 0.1188 - val_accuracy: 0.4062 - 43ms/epoch - 7ms/step
Epoch 28/400
6/6 - 0s - loss: 0.1209 - accuracy: 0.3986 - val_loss: 0.1188 - val_accuracy: 0.4062 - 55ms/epoch - 9ms/step
Epoch 29/400
6/6 - 0s - loss: 0.1213 - accuracy: 0.3959 - val_loss: 0.1188 - val_accuracy: 0.4062 - 45ms/epoch - 7ms/step
Epoch 30/400
6/6 - 0s - loss: 0.1214 - accuracy: 0.3950 - val_loss: 0.1188 - val_accuracy: 0.4062 - 51ms/epoch - 9ms/step
Epoch 31/400
6/6 - 0s - loss: 0.1217 - accuracy: 0.3932 - val_loss: 0.1188 - val_accuracy: 0.4062 - 58ms/epoch - 10ms/step
Epoch 32/400
6/6 - 0s - loss: 0.1215 - accuracy: 0.3941 - val_loss: 0.1188 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 33/400
6/6 - 0s - loss: 0.1215 - accuracy: 0.3941 - val_loss: 0.1187 - val_accuracy: 0.4062 - 63ms/epoch - 11ms/step
Epoch 34/400
6/6 - 0s - loss: 0.1210 - accuracy: 0.3968 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 35/400
6/6 - 0s - loss: 0.1217 - accuracy: 0.3932 - val_loss: 0.1188 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 36/400
6/6 - 0s - loss: 0.1215 - accuracy: 0.3941 - val_loss: 0.1187 - val_accuracy: 0.4062 - 48ms/epoch - 8ms/step
Epoch 37/400
6/6 - 0s - loss: 0.1213 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 38/400
6/6 - 0s - loss: 0.1210 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 39/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 59ms/epoch - 10ms/step
Epoch 40/400
6/6 - 0s - loss: 0.1214 - accuracy: 0.3941 - val_loss: 0.1187 - val_accuracy: 0.4062 - 64ms/epoch - 11ms/step
Epoch 41/400
6/6 - 0s - loss: 0.1216 - accuracy: 0.3932 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 42/400
6/6 - 0s - loss: 0.1209 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 57ms/epoch - 10ms/step
Epoch 43/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 56ms/epoch - 9ms/step
Epoch 44/400
6/6 - 0s - loss: 0.1212 - accuracy: 0.3941 - val_loss: 0.1187 - val_accuracy: 0.4062 - 46ms/epoch - 8ms/step
Epoch 45/400
6/6 - 0s - loss: 0.1216 - accuracy: 0.3941 - val_loss: 0.1187 - val_accuracy: 0.4062 - 46ms/epoch - 8ms/step
Epoch 46/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 45ms/epoch - 8ms/step
Epoch 47/400
6/6 - 0s - loss: 0.1208 - accuracy: 0.3968 - val_loss: 0.1187 - val_accuracy: 0.4062 - 43ms/epoch - 7ms/step
Epoch 48/400
6/6 - 0s - loss: 0.1209 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 45ms/epoch - 8ms/step
Epoch 49/400
6/6 - 0s - loss: 0.1214 - accuracy: 0.3941 - val_loss: 0.1187 - val_accuracy: 0.4062 - 66ms/epoch - 11ms/step
Epoch 50/400
6/6 - 0s - loss: 0.1214 - accuracy: 0.3941 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 51/400
6/6 - 0s - loss: 0.1214 - accuracy: 0.3941 - val_loss: 0.1187 - val_accuracy: 0.4062 - 52ms/epoch - 9ms/step
Epoch 52/400
6/6 - 0s - loss: 0.1215 - accuracy: 0.3941 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 53/400
6/6 - 0s - loss: 0.1215 - accuracy: 0.3941 - val_loss: 0.1187 - val_accuracy: 0.4062 - 59ms/epoch - 10ms/step
Epoch 54/400
6/6 - 0s - loss: 0.1208 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 59ms/epoch - 10ms/step
Epoch 55/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 58ms/epoch - 10ms/step
Epoch 56/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 43ms/epoch - 7ms/step
Epoch 57/400
6/6 - 0s - loss: 0.1210 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 58/400
6/6 - 0s - loss: 0.1210 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 47ms/epoch - 8ms/step
Epoch 59/400
6/6 - 0s - loss: 0.1212 - accuracy: 0.3941 - val_loss: 0.1187 - val_accuracy: 0.4062 - 43ms/epoch - 7ms/step
Epoch 60/400
6/6 - 0s - loss: 0.1208 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 59ms/epoch - 10ms/step
Epoch 61/400
6/6 - 0s - loss: 0.1213 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 47ms/epoch - 8ms/step
Epoch 62/400
6/6 - 0s - loss: 0.1208 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 45ms/epoch - 8ms/step
Epoch 63/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3968 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 64/400
6/6 - 0s - loss: 0.1215 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 59ms/epoch - 10ms/step
Epoch 65/400
6/6 - 0s - loss: 0.1207 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 66/400
6/6 - 0s - loss: 0.1208 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 56ms/epoch - 9ms/step
Epoch 67/400
6/6 - 0s - loss: 0.1210 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 61ms/epoch - 10ms/step
Epoch 68/400
6/6 - 0s - loss: 0.1210 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 58ms/epoch - 10ms/step
Epoch 69/400
6/6 - 0s - loss: 0.1214 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 57ms/epoch - 10ms/step
Epoch 70/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 48ms/epoch - 8ms/step
Epoch 71/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 45ms/epoch - 8ms/step
Epoch 72/400
6/6 - 0s - loss: 0.1213 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 47ms/epoch - 8ms/step
Epoch 73/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 59ms/epoch - 10ms/step
Epoch 74/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 75/400
6/6 - 0s - loss: 0.1210 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 76/400
6/6 - 0s - loss: 0.1213 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 45ms/epoch - 7ms/step
Epoch 77/400
6/6 - 0s - loss: 0.1217 - accuracy: 0.3932 - val_loss: 0.1187 - val_accuracy: 0.4062 - 44ms/epoch - 7ms/step
Epoch 78/400
6/6 - 0s - loss: 0.1208 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 46ms/epoch - 8ms/step
Epoch 79/400
6/6 - 0s - loss: 0.1209 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 60ms/epoch - 10ms/step
Epoch 80/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 46ms/epoch - 8ms/step
Epoch 81/400
6/6 - 0s - loss: 0.1210 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 43ms/epoch - 7ms/step
Epoch 82/400
6/6 - 0s - loss: 0.1208 - accuracy: 0.3968 - val_loss: 0.1187 - val_accuracy: 0.4062 - 45ms/epoch - 8ms/step
Epoch 83/400
6/6 - 0s - loss: 0.1209 - accuracy: 0.3968 - val_loss: 0.1187 - val_accuracy: 0.4062 - 42ms/epoch - 7ms/step
Epoch 84/400
6/6 - 0s - loss: 0.1207 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 73ms/epoch - 12ms/step
Epoch 85/400
6/6 - 0s - loss: 0.1210 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 67ms/epoch - 11ms/step
Epoch 86/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 66ms/epoch - 11ms/step
Epoch 87/400
6/6 - 0s - loss: 0.1210 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 69ms/epoch - 11ms/step
Epoch 88/400
6/6 - 0s - loss: 0.1209 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 73ms/epoch - 12ms/step
Epoch 89/400
6/6 - 0s - loss: 0.1208 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 72ms/epoch - 12ms/step
Epoch 90/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 74ms/epoch - 12ms/step
Epoch 91/400
6/6 - 0s - loss: 0.1212 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 59ms/epoch - 10ms/step
Epoch 92/400
6/6 - 0s - loss: 0.1209 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 63ms/epoch - 11ms/step
Epoch 93/400
6/6 - 0s - loss: 0.1204 - accuracy: 0.3977 - val_loss: 0.1187 - val_accuracy: 0.4062 - 76ms/epoch - 13ms/step
Epoch 94/400
6/6 - 0s - loss: 0.1209 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 74ms/epoch - 12ms/step
Epoch 95/400
6/6 - 0s - loss: 0.1209 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 65ms/epoch - 11ms/step
Epoch 96/400
6/6 - 0s - loss: 0.1210 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 73ms/epoch - 12ms/step
Epoch 97/400
6/6 - 0s - loss: 0.1207 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 61ms/epoch - 10ms/step
Epoch 98/400
6/6 - 0s - loss: 0.1211 - accuracy: 0.3950 - val_loss: 0.1187 - val_accuracy: 0.4062 - 76ms/epoch - 13ms/step
Epoch 99/400
6/6 - 0s - loss: 0.1209 - accuracy: 0.3959 - val_loss: 0.1187 - val_accuracy: 0.4062 - 72ms/epoch - 12ms/step
Epoch 100/400
6/6 - 0s - loss: 0.1202 - accuracy: 0.3968 - val_loss: 0.1185 - val_accuracy: 0.4062 - 76ms/epoch - 13ms/step
Epoch 101/400
6/6 - 0s - loss: 0.1204 - accuracy: 0.3959 - val_loss: 0.1183 - val_accuracy: 0.4062 - 76ms/epoch - 13ms/step
Epoch 102/400
6/6 - 0s - loss: 0.1206 - accuracy: 0.3950 - val_loss: 0.1182 - val_accuracy: 0.4062 - 67ms/epoch - 11ms/step
Epoch 103/400
6/6 - 0s - loss: 0.1205 - accuracy: 0.3959 - val_loss: 0.1182 - val_accuracy: 0.4062 - 65ms/epoch - 11ms/step
Epoch 104/400
6/6 - 0s - loss: 0.1201 - accuracy: 0.3959 - val_loss: 0.1183 - val_accuracy: 0.4062 - 71ms/epoch - 12ms/step
Epoch 105/400
6/6 - 0s - loss: 0.1202 - accuracy: 0.3959 - val_loss: 0.1182 - val_accuracy: 0.4062 - 74ms/epoch - 12ms/step
Epoch 106/400
6/6 - 0s - loss: 0.1202 - accuracy: 0.3968 - val_loss: 0.1181 - val_accuracy: 0.4062 - 75ms/epoch - 13ms/step
Epoch 107/400
6/6 - 0s - loss: 0.1198 - accuracy: 0.3959 - val_loss: 0.1181 - val_accuracy: 0.4062 - 74ms/epoch - 12ms/step
Epoch 108/400
6/6 - 0s - loss: 0.1203 - accuracy: 0.3950 - val_loss: 0.1180 - val_accuracy: 0.4062 - 63ms/epoch - 10ms/step
Epoch 109/400
6/6 - 0s - loss: 0.1195 - accuracy: 0.3959 - val_loss: 0.1179 - val_accuracy: 0.4062 - 84ms/epoch - 14ms/step
Epoch 110/400
6/6 - 0s - loss: 0.1197 - accuracy: 0.3950 - val_loss: 0.1179 - val_accuracy: 0.4062 - 75ms/epoch - 12ms/step
Epoch 111/400
6/6 - 0s - loss: 0.1198 - accuracy: 0.3986 - val_loss: 0.1179 - val_accuracy: 0.4062 - 76ms/epoch - 13ms/step
Epoch 112/400
6/6 - 0s - loss: 0.1191 - accuracy: 0.3968 - val_loss: 0.1177 - val_accuracy: 0.4062 - 80ms/epoch - 13ms/step
Epoch 113/400
6/6 - 0s - loss: 0.1189 - accuracy: 0.3977 - val_loss: 0.1174 - val_accuracy: 0.4062 - 74ms/epoch - 12ms/step
Epoch 114/400
6/6 - 0s - loss: 0.1191 - accuracy: 0.3986 - val_loss: 0.1171 - val_accuracy: 0.4062 - 75ms/epoch - 12ms/step
Epoch 115/400
6/6 - 0s - loss: 0.1188 - accuracy: 0.3995 - val_loss: 0.1165 - val_accuracy: 0.4062 - 62ms/epoch - 10ms/step
Epoch 116/400
6/6 - 0s - loss: 0.1167 - accuracy: 0.4227 - val_loss: 0.1153 - val_accuracy: 0.4458 - 64ms/epoch - 11ms/step
Epoch 117/400
6/6 - 0s - loss: 0.1162 - accuracy: 0.4281 - val_loss: 0.1128 - val_accuracy: 0.4583 - 77ms/epoch - 13ms/step
Epoch 118/400
6/6 - 0s - loss: 0.1128 - accuracy: 0.4513 - val_loss: 0.1091 - val_accuracy: 0.4750 - 82ms/epoch - 14ms/step
Epoch 119/400
6/6 - 0s - loss: 0.1084 - accuracy: 0.4745 - val_loss: 0.1021 - val_accuracy: 0.4979 - 77ms/epoch - 13ms/step
Epoch 120/400
6/6 - 0s - loss: 0.1081 - accuracy: 0.4710 - val_loss: 0.0965 - val_accuracy: 0.5250 - 82ms/epoch - 14ms/step
Epoch 121/400
6/6 - 0s - loss: 0.1057 - accuracy: 0.4772 - val_loss: 0.0885 - val_accuracy: 0.5667 - 79ms/epoch - 13ms/step
Epoch 122/400
6/6 - 0s - loss: 0.1007 - accuracy: 0.5058 - val_loss: 0.0852 - val_accuracy: 0.5771 - 83ms/epoch - 14ms/step
Epoch 123/400
6/6 - 0s - loss: 0.0973 - accuracy: 0.5228 - val_loss: 0.0847 - val_accuracy: 0.5771 - 80ms/epoch - 13ms/step
Epoch 124/400
6/6 - 0s - loss: 0.0931 - accuracy: 0.5389 - val_loss: 0.0828 - val_accuracy: 0.5896 - 77ms/epoch - 13ms/step
Epoch 125/400
6/6 - 0s - loss: 0.0940 - accuracy: 0.5326 - val_loss: 0.0810 - val_accuracy: 0.6000 - 89ms/epoch - 15ms/step
Epoch 126/400
6/6 - 0s - loss: 0.0906 - accuracy: 0.5505 - val_loss: 0.0816 - val_accuracy: 0.5938 - 78ms/epoch - 13ms/step
Epoch 127/400
6/6 - 0s - loss: 0.0900 - accuracy: 0.5523 - val_loss: 0.0819 - val_accuracy: 0.5875 - 73ms/epoch - 12ms/step
Epoch 128/400
6/6 - 0s - loss: 0.0894 - accuracy: 0.5684 - val_loss: 0.0819 - val_accuracy: 0.5938 - 63ms/epoch - 10ms/step
Epoch 129/400
6/6 - 0s - loss: 0.0900 - accuracy: 0.5559 - val_loss: 0.0824 - val_accuracy: 0.5896 - 59ms/epoch - 10ms/step
Epoch 130/400
6/6 - 0s - loss: 0.0884 - accuracy: 0.5630 - val_loss: 0.0808 - val_accuracy: 0.5958 - 58ms/epoch - 10ms/step
Epoch 131/400
6/6 - 0s - loss: 0.0900 - accuracy: 0.5541 - val_loss: 0.0791 - val_accuracy: 0.6062 - 44ms/epoch - 7ms/step
Epoch 132/400
6/6 - 0s - loss: 0.0880 - accuracy: 0.5675 - val_loss: 0.0796 - val_accuracy: 0.5979 - 60ms/epoch - 10ms/step
Epoch 133/400
6/6 - 0s - loss: 0.0867 - accuracy: 0.5702 - val_loss: 0.0802 - val_accuracy: 0.6000 - 45ms/epoch - 7ms/step
Epoch 134/400
6/6 - 0s - loss: 0.0863 - accuracy: 0.5746 - val_loss: 0.0805 - val_accuracy: 0.6000 - 59ms/epoch - 10ms/step
Epoch 135/400
6/6 - 0s - loss: 0.0866 - accuracy: 0.5710 - val_loss: 0.0804 - val_accuracy: 0.5979 - 45ms/epoch - 7ms/step
Epoch 136/400
6/6 - 0s - loss: 0.0860 - accuracy: 0.5719 - val_loss: 0.0799 - val_accuracy: 0.5979 - 44ms/epoch - 7ms/step
Epoch 137/400
6/6 - 0s - loss: 0.0852 - accuracy: 0.5755 - val_loss: 0.0806 - val_accuracy: 0.6000 - 44ms/epoch - 7ms/step
Epoch 138/400
6/6 - 0s - loss: 0.0873 - accuracy: 0.5675 - val_loss: 0.0805 - val_accuracy: 0.6021 - 61ms/epoch - 10ms/step
Epoch 139/400
6/6 - 0s - loss: 0.0839 - accuracy: 0.5871 - val_loss: 0.0791 - val_accuracy: 0.6104 - 48ms/epoch - 8ms/step
Epoch 140/400
6/6 - 0s - loss: 0.0854 - accuracy: 0.5755 - val_loss: 0.0779 - val_accuracy: 0.6083 - 43ms/epoch - 7ms/step
Epoch 141/400
6/6 - 0s - loss: 0.0835 - accuracy: 0.5871 - val_loss: 0.0773 - val_accuracy: 0.6104 - 46ms/epoch - 8ms/step
Epoch 142/400
6/6 - 0s - loss: 0.0832 - accuracy: 0.5889 - val_loss: 0.0777 - val_accuracy: 0.6125 - 59ms/epoch - 10ms/step
Epoch 143/400
6/6 - 0s - loss: 0.0837 - accuracy: 0.5845 - val_loss: 0.0787 - val_accuracy: 0.6062 - 44ms/epoch - 7ms/step
Epoch 144/400
6/6 - 0s - loss: 0.0841 - accuracy: 0.5853 - val_loss: 0.0807 - val_accuracy: 0.5958 - 59ms/epoch - 10ms/step
Epoch 145/400
6/6 - 0s - loss: 0.0827 - accuracy: 0.5880 - val_loss: 0.0824 - val_accuracy: 0.5896 - 43ms/epoch - 7ms/step
Epoch 146/400
6/6 - 0s - loss: 0.0828 - accuracy: 0.5925 - val_loss: 0.0825 - val_accuracy: 0.5854 - 44ms/epoch - 7ms/step
Epoch 147/400
6/6 - 0s - loss: 0.0823 - accuracy: 0.5889 - val_loss: 0.0822 - val_accuracy: 0.5896 - 46ms/epoch - 8ms/step
Epoch 148/400
6/6 - 0s - loss: 0.0815 - accuracy: 0.5916 - val_loss: 0.0812 - val_accuracy: 0.5938 - 51ms/epoch - 8ms/step
Epoch 149/400
6/6 - 0s - loss: 0.0822 - accuracy: 0.5934 - val_loss: 0.0806 - val_accuracy: 0.5938 - 49ms/epoch - 8ms/step
Epoch 150/400
6/6 - 0s - loss: 0.0841 - accuracy: 0.5836 - val_loss: 0.0815 - val_accuracy: 0.5917 - 57ms/epoch - 9ms/step
Epoch 151/400
6/6 - 0s - loss: 0.0837 - accuracy: 0.5871 - val_loss: 0.0815 - val_accuracy: 0.5896 - 44ms/epoch - 7ms/step
Epoch 152/400
6/6 - 0s - loss: 0.0817 - accuracy: 0.5979 - val_loss: 0.0800 - val_accuracy: 0.6000 - 59ms/epoch - 10ms/step
Epoch 153/400
6/6 - 0s - loss: 0.0820 - accuracy: 0.5934 - val_loss: 0.0792 - val_accuracy: 0.6021 - 45ms/epoch - 7ms/step
Epoch 154/400
6/6 - 0s - loss: 0.0830 - accuracy: 0.5871 - val_loss: 0.0799 - val_accuracy: 0.6021 - 44ms/epoch - 7ms/step
Epoch 155/400
6/6 - 0s - loss: 0.0804 - accuracy: 0.6014 - val_loss: 0.0801 - val_accuracy: 0.5979 - 43ms/epoch - 7ms/step
Epoch 156/400
6/6 - 0s - loss: 0.0830 - accuracy: 0.5871 - val_loss: 0.0801 - val_accuracy: 0.6021 - 43ms/epoch - 7ms/step
Epoch 157/400
6/6 - 0s - loss: 0.0806 - accuracy: 0.6032 - val_loss: 0.0807 - val_accuracy: 0.5938 - 45ms/epoch - 7ms/step
Epoch 158/400
6/6 - 0s - loss: 0.0806 - accuracy: 0.6005 - val_loss: 0.0811 - val_accuracy: 0.5938 - 60ms/epoch - 10ms/step
Epoch 159/400
6/6 - 0s - loss: 0.0803 - accuracy: 0.5987 - val_loss: 0.0808 - val_accuracy: 0.5958 - 58ms/epoch - 10ms/step
Epoch 160/400
6/6 - 0s - loss: 0.0805 - accuracy: 0.6014 - val_loss: 0.0807 - val_accuracy: 0.6000 - 44ms/epoch - 7ms/step
Epoch 161/400
6/6 - 0s - loss: 0.0816 - accuracy: 0.5979 - val_loss: 0.0816 - val_accuracy: 0.5917 - 53ms/epoch - 9ms/step
Epoch 162/400
6/6 - 0s - loss: 0.0791 - accuracy: 0.6130 - val_loss: 0.0821 - val_accuracy: 0.5917 - 46ms/epoch - 8ms/step
Epoch 163/400
6/6 - 0s - loss: 0.0820 - accuracy: 0.5898 - val_loss: 0.0823 - val_accuracy: 0.5896 - 49ms/epoch - 8ms/step
Epoch 164/400
6/6 - 0s - loss: 0.0795 - accuracy: 0.6041 - val_loss: 0.0818 - val_accuracy: 0.5917 - 47ms/epoch - 8ms/step
Epoch 165/400
6/6 - 0s - loss: 0.0829 - accuracy: 0.5880 - val_loss: 0.0816 - val_accuracy: 0.5896 - 44ms/epoch - 7ms/step
Epoch 166/400
6/6 - 0s - loss: 0.0793 - accuracy: 0.6041 - val_loss: 0.0809 - val_accuracy: 0.5958 - 44ms/epoch - 7ms/step
Epoch 167/400
6/6 - 0s - loss: 0.0794 - accuracy: 0.6086 - val_loss: 0.0809 - val_accuracy: 0.5958 - 58ms/epoch - 10ms/step
Epoch 168/400
6/6 - 0s - loss: 0.0812 - accuracy: 0.5961 - val_loss: 0.0815 - val_accuracy: 0.5938 - 46ms/epoch - 8ms/step
Epoch 169/400
6/6 - 0s - loss: 0.0817 - accuracy: 0.5943 - val_loss: 0.0813 - val_accuracy: 0.5938 - 58ms/epoch - 10ms/step
Epoch 170/400
6/6 - 0s - loss: 0.0808 - accuracy: 0.5952 - val_loss: 0.0818 - val_accuracy: 0.5917 - 43ms/epoch - 7ms/step
Epoch 171/400
6/6 - 0s - loss: 0.0790 - accuracy: 0.6130 - val_loss: 0.0813 - val_accuracy: 0.5938 - 44ms/epoch - 7ms/step
Epoch 172/400
6/6 - 0s - loss: 0.0792 - accuracy: 0.6059 - val_loss: 0.0808 - val_accuracy: 0.5938 - 59ms/epoch - 10ms/step
Epoch 173/400
6/6 - 0s - loss: 0.0793 - accuracy: 0.6059 - val_loss: 0.0804 - val_accuracy: 0.5979 - 45ms/epoch - 7ms/step
Epoch 174/400
6/6 - 0s - loss: 0.0776 - accuracy: 0.6148 - val_loss: 0.0796 - val_accuracy: 0.6042 - 44ms/epoch - 7ms/step
Epoch 175/400
6/6 - 0s - loss: 0.0784 - accuracy: 0.6139 - val_loss: 0.0793 - val_accuracy: 0.6042 - 44ms/epoch - 7ms/step
Epoch 176/400
6/6 - 0s - loss: 0.0792 - accuracy: 0.6086 - val_loss: 0.0805 - val_accuracy: 0.5958 - 57ms/epoch - 9ms/step
Epoch 177/400
6/6 - 0s - loss: 0.0790 - accuracy: 0.6095 - val_loss: 0.0825 - val_accuracy: 0.5875 - 43ms/epoch - 7ms/step
Epoch 178/400
6/6 - 0s - loss: 0.0786 - accuracy: 0.6104 - val_loss: 0.0822 - val_accuracy: 0.5854 - 45ms/epoch - 7ms/step
Epoch 179/400
6/6 - 0s - loss: 0.0775 - accuracy: 0.6166 - val_loss: 0.0822 - val_accuracy: 0.5854 - 51ms/epoch - 9ms/step
Epoch 180/400
6/6 - 0s - loss: 0.0776 - accuracy: 0.6157 - val_loss: 0.0820 - val_accuracy: 0.5896 - 50ms/epoch - 8ms/step
Epoch 181/400
6/6 - 0s - loss: 0.0779 - accuracy: 0.6113 - val_loss: 0.0816 - val_accuracy: 0.5938 - 45ms/epoch - 7ms/step
Epoch 182/400
6/6 - 0s - loss: 0.0775 - accuracy: 0.6157 - val_loss: 0.0817 - val_accuracy: 0.5917 - 58ms/epoch - 10ms/step
Epoch 183/400
6/6 - 0s - loss: 0.0779 - accuracy: 0.6139 - val_loss: 0.0817 - val_accuracy: 0.5896 - 51ms/epoch - 8ms/step
Epoch 184/400
6/6 - 0s - loss: 0.0774 - accuracy: 0.6122 - val_loss: 0.0808 - val_accuracy: 0.5979 - 43ms/epoch - 7ms/step
Epoch 185/400
6/6 - 0s - loss: 0.0785 - accuracy: 0.6095 - val_loss: 0.0811 - val_accuracy: 0.5958 - 44ms/epoch - 7ms/step
Epoch 186/400
6/6 - 0s - loss: 0.0777 - accuracy: 0.6122 - val_loss: 0.0816 - val_accuracy: 0.5896 - 43ms/epoch - 7ms/step
Epoch 187/400
6/6 - 0s - loss: 0.0777 - accuracy: 0.6130 - val_loss: 0.0823 - val_accuracy: 0.5833 - 61ms/epoch - 10ms/step
Epoch 188/400
6/6 - 0s - loss: 0.0767 - accuracy: 0.6175 - val_loss: 0.0815 - val_accuracy: 0.5938 - 45ms/epoch - 7ms/step
Epoch 189/400
6/6 - 0s - loss: 0.0772 - accuracy: 0.6175 - val_loss: 0.0816 - val_accuracy: 0.5938 - 46ms/epoch - 8ms/step
Epoch 190/400
6/6 - 0s - loss: 0.0773 - accuracy: 0.6148 - val_loss: 0.0804 - val_accuracy: 0.6000 - 46ms/epoch - 8ms/step
Epoch 191/400
6/6 - 0s - loss: 0.0794 - accuracy: 0.6068 - val_loss: 0.0800 - val_accuracy: 0.6000 - 47ms/epoch - 8ms/step
Epoch 192/400
6/6 - 0s - loss: 0.0780 - accuracy: 0.6148 - val_loss: 0.0800 - val_accuracy: 0.5979 - 44ms/epoch - 7ms/step
Epoch 193/400
6/6 - 0s - loss: 0.0769 - accuracy: 0.6166 - val_loss: 0.0804 - val_accuracy: 0.5979 - 44ms/epoch - 7ms/step
Epoch 194/400
6/6 - 0s - loss: 0.0776 - accuracy: 0.6166 - val_loss: 0.0801 - val_accuracy: 0.5979 - 59ms/epoch - 10ms/step
Epoch 195/400
6/6 - 0s - loss: 0.0759 - accuracy: 0.6211 - val_loss: 0.0794 - val_accuracy: 0.6042 - 58ms/epoch - 10ms/step
Epoch 196/400
6/6 - 0s - loss: 0.0768 - accuracy: 0.6175 - val_loss: 0.0793 - val_accuracy: 0.6042 - 58ms/epoch - 10ms/step
Epoch 197/400
6/6 - 0s - loss: 0.0771 - accuracy: 0.6157 - val_loss: 0.0794 - val_accuracy: 0.6042 - 57ms/epoch - 10ms/step
Epoch 198/400
6/6 - 0s - loss: 0.0778 - accuracy: 0.6139 - val_loss: 0.0795 - val_accuracy: 0.6021 - 48ms/epoch - 8ms/step
Epoch 199/400
6/6 - 0s - loss: 0.0767 - accuracy: 0.6193 - val_loss: 0.0789 - val_accuracy: 0.6083 - 45ms/epoch - 7ms/step
Epoch 200/400
6/6 - 0s - loss: 0.0777 - accuracy: 0.6130 - val_loss: 0.0790 - val_accuracy: 0.6021 - 45ms/epoch - 7ms/step
Epoch 201/400
6/6 - 0s - loss: 0.0766 - accuracy: 0.6220 - val_loss: 0.0795 - val_accuracy: 0.6021 - 47ms/epoch - 8ms/step
Epoch 202/400
6/6 - 0s - loss: 0.0757 - accuracy: 0.6220 - val_loss: 0.0799 - val_accuracy: 0.6000 - 57ms/epoch - 10ms/step
Epoch 203/400
6/6 - 0s - loss: 0.0755 - accuracy: 0.6265 - val_loss: 0.0800 - val_accuracy: 0.5979 - 46ms/epoch - 8ms/step
Epoch 204/400
6/6 - 0s - loss: 0.0759 - accuracy: 0.6220 - val_loss: 0.0795 - val_accuracy: 0.6042 - 44ms/epoch - 7ms/step
Epoch 205/400
6/6 - 0s - loss: 0.0775 - accuracy: 0.6157 - val_loss: 0.0789 - val_accuracy: 0.6062 - 58ms/epoch - 10ms/step
Epoch 206/400
6/6 - 0s - loss: 0.0760 - accuracy: 0.6238 - val_loss: 0.0796 - val_accuracy: 0.6042 - 43ms/epoch - 7ms/step
Epoch 207/400
6/6 - 0s - loss: 0.0751 - accuracy: 0.6256 - val_loss: 0.0795 - val_accuracy: 0.6042 - 43ms/epoch - 7ms/step
Epoch 208/400
6/6 - 0s - loss: 0.0769 - accuracy: 0.6175 - val_loss: 0.0799 - val_accuracy: 0.6000 - 59ms/epoch - 10ms/step
Epoch 209/400
6/6 - 0s - loss: 0.0758 - accuracy: 0.6202 - val_loss: 0.0795 - val_accuracy: 0.6042 - 44ms/epoch - 7ms/step
Epoch 210/400
6/6 - 0s - loss: 0.0765 - accuracy: 0.6184 - val_loss: 0.0793 - val_accuracy: 0.6042 - 43ms/epoch - 7ms/step
Epoch 211/400
6/6 - 0s - loss: 0.0751 - accuracy: 0.6256 - val_loss: 0.0794 - val_accuracy: 0.6042 - 47ms/epoch - 8ms/step
Epoch 212/400
6/6 - 0s - loss: 0.0764 - accuracy: 0.6193 - val_loss: 0.0802 - val_accuracy: 0.5979 - 61ms/epoch - 10ms/step
Epoch 213/400
6/6 - 0s - loss: 0.0766 - accuracy: 0.6193 - val_loss: 0.0801 - val_accuracy: 0.6000 - 51ms/epoch - 8ms/step
Epoch 214/400
6/6 - 0s - loss: 0.0762 - accuracy: 0.6184 - val_loss: 0.0799 - val_accuracy: 0.6000 - 52ms/epoch - 9ms/step
Epoch 215/400
6/6 - 0s - loss: 0.0758 - accuracy: 0.6220 - val_loss: 0.0788 - val_accuracy: 0.6062 - 47ms/epoch - 8ms/step
Epoch 216/400
6/6 - 0s - loss: 0.0759 - accuracy: 0.6229 - val_loss: 0.0789 - val_accuracy: 0.6062 - 59ms/epoch - 10ms/step
Epoch 217/400
6/6 - 0s - loss: 0.0767 - accuracy: 0.6166 - val_loss: 0.0790 - val_accuracy: 0.6062 - 54ms/epoch - 9ms/step
Epoch 218/400
6/6 - 0s - loss: 0.0759 - accuracy: 0.6247 - val_loss: 0.0807 - val_accuracy: 0.5979 - 48ms/epoch - 8ms/step
Epoch 219/400
6/6 - 0s - loss: 0.0760 - accuracy: 0.6229 - val_loss: 0.0817 - val_accuracy: 0.5917 - 60ms/epoch - 10ms/step
Epoch 220/400
6/6 - 0s - loss: 0.0767 - accuracy: 0.6157 - val_loss: 0.0814 - val_accuracy: 0.5958 - 57ms/epoch - 9ms/step
Epoch 221/400
6/6 - 0s - loss: 0.0754 - accuracy: 0.6265 - val_loss: 0.0805 - val_accuracy: 0.5979 - 45ms/epoch - 7ms/step
Epoch 222/400
6/6 - 0s - loss: 0.0759 - accuracy: 0.6229 - val_loss: 0.0787 - val_accuracy: 0.6083 - 48ms/epoch - 8ms/step
Epoch 223/400
6/6 - 0s - loss: 0.0763 - accuracy: 0.6175 - val_loss: 0.0791 - val_accuracy: 0.6042 - 58ms/epoch - 10ms/step
Epoch 224/400
6/6 - 0s - loss: 0.0764 - accuracy: 0.6202 - val_loss: 0.0793 - val_accuracy: 0.6021 - 43ms/epoch - 7ms/step
Epoch 225/400
6/6 - 0s - loss: 0.0754 - accuracy: 0.6247 - val_loss: 0.0794 - val_accuracy: 0.6042 - 44ms/epoch - 7ms/step
Epoch 226/400
6/6 - 0s - loss: 0.0752 - accuracy: 0.6256 - val_loss: 0.0805 - val_accuracy: 0.5958 - 43ms/epoch - 7ms/step
Epoch 227/400
6/6 - 0s - loss: 0.0767 - accuracy: 0.6166 - val_loss: 0.0803 - val_accuracy: 0.5979 - 44ms/epoch - 7ms/step
Epoch 228/400
6/6 - 0s - loss: 0.0747 - accuracy: 0.6265 - val_loss: 0.0800 - val_accuracy: 0.5979 - 60ms/epoch - 10ms/step
Epoch 229/400
6/6 - 0s - loss: 0.0741 - accuracy: 0.6336 - val_loss: 0.0803 - val_accuracy: 0.5979 - 67ms/epoch - 11ms/step
Epoch 230/400
6/6 - 0s - loss: 0.0753 - accuracy: 0.6273 - val_loss: 0.0804 - val_accuracy: 0.5958 - 45ms/epoch - 7ms/step
Epoch 231/400
6/6 - 0s - loss: 0.0751 - accuracy: 0.6273 - val_loss: 0.0801 - val_accuracy: 0.5979 - 60ms/epoch - 10ms/step
Epoch 232/400
6/6 - 0s - loss: 0.0751 - accuracy: 0.6273 - val_loss: 0.0799 - val_accuracy: 0.6000 - 57ms/epoch - 10ms/step
Epoch 233/400
6/6 - 0s - loss: 0.0740 - accuracy: 0.6327 - val_loss: 0.0794 - val_accuracy: 0.6042 - 59ms/epoch - 10ms/step
Epoch 234/400
6/6 - 0s - loss: 0.0744 - accuracy: 0.6282 - val_loss: 0.0792 - val_accuracy: 0.6042 - 60ms/epoch - 10ms/step
Epoch 235/400
6/6 - 0s - loss: 0.0737 - accuracy: 0.6336 - val_loss: 0.0791 - val_accuracy: 0.6042 - 50ms/epoch - 8ms/step
Epoch 236/400
6/6 - 0s - loss: 0.0741 - accuracy: 0.6309 - val_loss: 0.0794 - val_accuracy: 0.6062 - 63ms/epoch - 11ms/step
Epoch 237/400
6/6 - 0s - loss: 0.0746 - accuracy: 0.6300 - val_loss: 0.0789 - val_accuracy: 0.6042 - 45ms/epoch - 8ms/step
Epoch 238/400
6/6 - 0s - loss: 0.0751 - accuracy: 0.6291 - val_loss: 0.0780 - val_accuracy: 0.6104 - 47ms/epoch - 8ms/step
Epoch 239/400
6/6 - 0s - loss: 0.0740 - accuracy: 0.6318 - val_loss: 0.0779 - val_accuracy: 0.6104 - 46ms/epoch - 8ms/step
Epoch 240/400
6/6 - 0s - loss: 0.0754 - accuracy: 0.6256 - val_loss: 0.0793 - val_accuracy: 0.6021 - 47ms/epoch - 8ms/step
Epoch 241/400
6/6 - 0s - loss: 0.0749 - accuracy: 0.6291 - val_loss: 0.0795 - val_accuracy: 0.6021 - 43ms/epoch - 7ms/step
Epoch 242/400
6/6 - 0s - loss: 0.0747 - accuracy: 0.6282 - val_loss: 0.0802 - val_accuracy: 0.6000 - 59ms/epoch - 10ms/step
Epoch 243/400
6/6 - 0s - loss: 0.0748 - accuracy: 0.6273 - val_loss: 0.0805 - val_accuracy: 0.5979 - 61ms/epoch - 10ms/step
Epoch 244/400
6/6 - 0s - loss: 0.0738 - accuracy: 0.6327 - val_loss: 0.0803 - val_accuracy: 0.5979 - 52ms/epoch - 9ms/step
Epoch 245/400
6/6 - 0s - loss: 0.0752 - accuracy: 0.6256 - val_loss: 0.0808 - val_accuracy: 0.5958 - 57ms/epoch - 9ms/step
Epoch 246/400
6/6 - 0s - loss: 0.0748 - accuracy: 0.6300 - val_loss: 0.0816 - val_accuracy: 0.5917 - 59ms/epoch - 10ms/step
Epoch 247/400
6/6 - 0s - loss: 0.0742 - accuracy: 0.6327 - val_loss: 0.0814 - val_accuracy: 0.5938 - 60ms/epoch - 10ms/step
Epoch 248/400
6/6 - 0s - loss: 0.0751 - accuracy: 0.6273 - val_loss: 0.0808 - val_accuracy: 0.5958 - 44ms/epoch - 7ms/step
Epoch 249/400
6/6 - 0s - loss: 0.0739 - accuracy: 0.6309 - val_loss: 0.0793 - val_accuracy: 0.6042 - 46ms/epoch - 8ms/step
Epoch 250/400
6/6 - 0s - loss: 0.0749 - accuracy: 0.6256 - val_loss: 0.0792 - val_accuracy: 0.6042 - 45ms/epoch - 8ms/step
Epoch 251/400
6/6 - 0s - loss: 0.0744 - accuracy: 0.6336 - val_loss: 0.0788 - val_accuracy: 0.6062 - 43ms/epoch - 7ms/step
Epoch 252/400
6/6 - 0s - loss: 0.0746 - accuracy: 0.6309 - val_loss: 0.0799 - val_accuracy: 0.6000 - 44ms/epoch - 7ms/step
Epoch 253/400
6/6 - 0s - loss: 0.0742 - accuracy: 0.6291 - val_loss: 0.0797 - val_accuracy: 0.6021 - 52ms/epoch - 9ms/step
Epoch 254/400
6/6 - 0s - loss: 0.0748 - accuracy: 0.6309 - val_loss: 0.0799 - val_accuracy: 0.6021 - 45ms/epoch - 7ms/step
Epoch 255/400
6/6 - 0s - loss: 0.0741 - accuracy: 0.6354 - val_loss: 0.0779 - val_accuracy: 0.6104 - 57ms/epoch - 9ms/step
Epoch 256/400
6/6 - 0s - loss: 0.0745 - accuracy: 0.6309 - val_loss: 0.0778 - val_accuracy: 0.6104 - 43ms/epoch - 7ms/step
Epoch 257/400
6/6 - 0s - loss: 0.0744 - accuracy: 0.6318 - val_loss: 0.0787 - val_accuracy: 0.6042 - 63ms/epoch - 11ms/step
Epoch 258/400
6/6 - 0s - loss: 0.0742 - accuracy: 0.6318 - val_loss: 0.0795 - val_accuracy: 0.6021 - 59ms/epoch - 10ms/step
Epoch 259/400
6/6 - 0s - loss: 0.0732 - accuracy: 0.6354 - val_loss: 0.0792 - val_accuracy: 0.6042 - 57ms/epoch - 9ms/step
Epoch 260/400
6/6 - 0s - loss: 0.0744 - accuracy: 0.6282 - val_loss: 0.0788 - val_accuracy: 0.6062 - 44ms/epoch - 7ms/step
Epoch 261/400
6/6 - 0s - loss: 0.0743 - accuracy: 0.6327 - val_loss: 0.0795 - val_accuracy: 0.6021 - 45ms/epoch - 7ms/step
Epoch 262/400
6/6 - 0s - loss: 0.0744 - accuracy: 0.6282 - val_loss: 0.0802 - val_accuracy: 0.5979 - 44ms/epoch - 7ms/step
Epoch 263/400
6/6 - 0s - loss: 0.0740 - accuracy: 0.6291 - val_loss: 0.0809 - val_accuracy: 0.5958 - 42ms/epoch - 7ms/step
Epoch 264/400
6/6 - 0s - loss: 0.0742 - accuracy: 0.6336 - val_loss: 0.0807 - val_accuracy: 0.5958 - 59ms/epoch - 10ms/step
Epoch 265/400
6/6 - 0s - loss: 0.0743 - accuracy: 0.6282 - val_loss: 0.0799 - val_accuracy: 0.6000 - 58ms/epoch - 10ms/step
Epoch 266/400
6/6 - 0s - loss: 0.0745 - accuracy: 0.6291 - val_loss: 0.0796 - val_accuracy: 0.6021 - 46ms/epoch - 8ms/step
Epoch 267/400
6/6 - 0s - loss: 0.0739 - accuracy: 0.6327 - val_loss: 0.0797 - val_accuracy: 0.6021 - 60ms/epoch - 10ms/step
Epoch 268/400
6/6 - 0s - loss: 0.0742 - accuracy: 0.6327 - val_loss: 0.0806 - val_accuracy: 0.5958 - 46ms/epoch - 8ms/step
Epoch 269/400
6/6 - 0s - loss: 0.0740 - accuracy: 0.6336 - val_loss: 0.0791 - val_accuracy: 0.6042 - 59ms/epoch - 10ms/step
Epoch 270/400
6/6 - 0s - loss: 0.0738 - accuracy: 0.6345 - val_loss: 0.0785 - val_accuracy: 0.6083 - 60ms/epoch - 10ms/step
Epoch 271/400
6/6 - 0s - loss: 0.0733 - accuracy: 0.6354 - val_loss: 0.0786 - val_accuracy: 0.6062 - 68ms/epoch - 11ms/step
Epoch 272/400
6/6 - 0s - loss: 0.0742 - accuracy: 0.6327 - val_loss: 0.0785 - val_accuracy: 0.6062 - 58ms/epoch - 10ms/step
Epoch 273/400
6/6 - 0s - loss: 0.0741 - accuracy: 0.6300 - val_loss: 0.0781 - val_accuracy: 0.6104 - 44ms/epoch - 7ms/step
Epoch 274/400
6/6 - 0s - loss: 0.0746 - accuracy: 0.6309 - val_loss: 0.0787 - val_accuracy: 0.6062 - 59ms/epoch - 10ms/step
Epoch 275/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6381 - val_loss: 0.0800 - val_accuracy: 0.6000 - 46ms/epoch - 8ms/step
Epoch 276/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6363 - val_loss: 0.0800 - val_accuracy: 0.6000 - 43ms/epoch - 7ms/step
Epoch 277/400
6/6 - 0s - loss: 0.0744 - accuracy: 0.6318 - val_loss: 0.0797 - val_accuracy: 0.6021 - 47ms/epoch - 8ms/step
Epoch 278/400
6/6 - 0s - loss: 0.0729 - accuracy: 0.6390 - val_loss: 0.0795 - val_accuracy: 0.6021 - 45ms/epoch - 8ms/step
Epoch 279/400
6/6 - 0s - loss: 0.0732 - accuracy: 0.6354 - val_loss: 0.0793 - val_accuracy: 0.6021 - 47ms/epoch - 8ms/step
Epoch 280/400
6/6 - 0s - loss: 0.0735 - accuracy: 0.6336 - val_loss: 0.0793 - val_accuracy: 0.6042 - 58ms/epoch - 10ms/step
Epoch 281/400
6/6 - 0s - loss: 0.0737 - accuracy: 0.6327 - val_loss: 0.0792 - val_accuracy: 0.6062 - 44ms/epoch - 7ms/step
Epoch 282/400
6/6 - 0s - loss: 0.0749 - accuracy: 0.6256 - val_loss: 0.0791 - val_accuracy: 0.6042 - 44ms/epoch - 7ms/step
Epoch 283/400
6/6 - 0s - loss: 0.0728 - accuracy: 0.6381 - val_loss: 0.0790 - val_accuracy: 0.6062 - 58ms/epoch - 10ms/step
Epoch 284/400
6/6 - 0s - loss: 0.0758 - accuracy: 0.6238 - val_loss: 0.0797 - val_accuracy: 0.6000 - 61ms/epoch - 10ms/step
Epoch 285/400
6/6 - 0s - loss: 0.0738 - accuracy: 0.6309 - val_loss: 0.0782 - val_accuracy: 0.6083 - 45ms/epoch - 8ms/step
Epoch 286/400
6/6 - 0s - loss: 0.0735 - accuracy: 0.6354 - val_loss: 0.0776 - val_accuracy: 0.6125 - 49ms/epoch - 8ms/step
Epoch 287/400
6/6 - 0s - loss: 0.0742 - accuracy: 0.6309 - val_loss: 0.0775 - val_accuracy: 0.6125 - 47ms/epoch - 8ms/step
Epoch 288/400
6/6 - 0s - loss: 0.0740 - accuracy: 0.6336 - val_loss: 0.0780 - val_accuracy: 0.6104 - 47ms/epoch - 8ms/step
Epoch 289/400
6/6 - 0s - loss: 0.0737 - accuracy: 0.6336 - val_loss: 0.0791 - val_accuracy: 0.6042 - 52ms/epoch - 9ms/step
Epoch 290/400
6/6 - 0s - loss: 0.0732 - accuracy: 0.6363 - val_loss: 0.0792 - val_accuracy: 0.6042 - 46ms/epoch - 8ms/step
Epoch 291/400
6/6 - 0s - loss: 0.0743 - accuracy: 0.6327 - val_loss: 0.0788 - val_accuracy: 0.6062 - 51ms/epoch - 9ms/step
Epoch 292/400
6/6 - 0s - loss: 0.0739 - accuracy: 0.6327 - val_loss: 0.0789 - val_accuracy: 0.6062 - 50ms/epoch - 8ms/step
Epoch 293/400
6/6 - 0s - loss: 0.0743 - accuracy: 0.6309 - val_loss: 0.0792 - val_accuracy: 0.6042 - 48ms/epoch - 8ms/step
Epoch 294/400
6/6 - 0s - loss: 0.0739 - accuracy: 0.6327 - val_loss: 0.0789 - val_accuracy: 0.6062 - 50ms/epoch - 8ms/step
Epoch 295/400
6/6 - 0s - loss: 0.0736 - accuracy: 0.6336 - val_loss: 0.0773 - val_accuracy: 0.6125 - 63ms/epoch - 11ms/step
Epoch 296/400
6/6 - 0s - loss: 0.0735 - accuracy: 0.6354 - val_loss: 0.0770 - val_accuracy: 0.6146 - 43ms/epoch - 7ms/step
Epoch 297/400
6/6 - 0s - loss: 0.0747 - accuracy: 0.6256 - val_loss: 0.0767 - val_accuracy: 0.6167 - 44ms/epoch - 7ms/step
Epoch 298/400
6/6 - 0s - loss: 0.0735 - accuracy: 0.6345 - val_loss: 0.0767 - val_accuracy: 0.6167 - 45ms/epoch - 8ms/step
Epoch 299/400
6/6 - 0s - loss: 0.0743 - accuracy: 0.6345 - val_loss: 0.0770 - val_accuracy: 0.6167 - 48ms/epoch - 8ms/step
Epoch 300/400
6/6 - 0s - loss: 0.0732 - accuracy: 0.6363 - val_loss: 0.0768 - val_accuracy: 0.6167 - 60ms/epoch - 10ms/step
Epoch 301/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6354 - val_loss: 0.0779 - val_accuracy: 0.6104 - 60ms/epoch - 10ms/step
Epoch 302/400
6/6 - 0s - loss: 0.0741 - accuracy: 0.6336 - val_loss: 0.0781 - val_accuracy: 0.6083 - 46ms/epoch - 8ms/step
Epoch 303/400
6/6 - 0s - loss: 0.0728 - accuracy: 0.6408 - val_loss: 0.0777 - val_accuracy: 0.6125 - 45ms/epoch - 7ms/step
Epoch 304/400
6/6 - 0s - loss: 0.0736 - accuracy: 0.6345 - val_loss: 0.0788 - val_accuracy: 0.6062 - 45ms/epoch - 8ms/step
Epoch 305/400
6/6 - 0s - loss: 0.0733 - accuracy: 0.6354 - val_loss: 0.0788 - val_accuracy: 0.6062 - 45ms/epoch - 8ms/step
Epoch 306/400
6/6 - 0s - loss: 0.0733 - accuracy: 0.6354 - val_loss: 0.0789 - val_accuracy: 0.6042 - 58ms/epoch - 10ms/step
Epoch 307/400
6/6 - 0s - loss: 0.0732 - accuracy: 0.6372 - val_loss: 0.0784 - val_accuracy: 0.6083 - 64ms/epoch - 11ms/step
Epoch 308/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6363 - val_loss: 0.0785 - val_accuracy: 0.6083 - 49ms/epoch - 8ms/step
Epoch 309/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6354 - val_loss: 0.0781 - val_accuracy: 0.6104 - 61ms/epoch - 10ms/step
Epoch 310/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6372 - val_loss: 0.0793 - val_accuracy: 0.6021 - 79ms/epoch - 13ms/step
Epoch 311/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6416 - val_loss: 0.0791 - val_accuracy: 0.6042 - 68ms/epoch - 11ms/step
Epoch 312/400
6/6 - 0s - loss: 0.0729 - accuracy: 0.6390 - val_loss: 0.0791 - val_accuracy: 0.6042 - 78ms/epoch - 13ms/step
Epoch 313/400
6/6 - 0s - loss: 0.0722 - accuracy: 0.6390 - val_loss: 0.0783 - val_accuracy: 0.6083 - 66ms/epoch - 11ms/step
Epoch 314/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6363 - val_loss: 0.0786 - val_accuracy: 0.6062 - 82ms/epoch - 14ms/step
Epoch 315/400
6/6 - 0s - loss: 0.0730 - accuracy: 0.6372 - val_loss: 0.0795 - val_accuracy: 0.6021 - 74ms/epoch - 12ms/step
Epoch 316/400
6/6 - 0s - loss: 0.0719 - accuracy: 0.6443 - val_loss: 0.0798 - val_accuracy: 0.6000 - 75ms/epoch - 12ms/step
Epoch 317/400
6/6 - 0s - loss: 0.0722 - accuracy: 0.6434 - val_loss: 0.0791 - val_accuracy: 0.6062 - 76ms/epoch - 13ms/step
Epoch 318/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6434 - val_loss: 0.0791 - val_accuracy: 0.6042 - 72ms/epoch - 12ms/step
Epoch 319/400
6/6 - 0s - loss: 0.0726 - accuracy: 0.6372 - val_loss: 0.0777 - val_accuracy: 0.6104 - 75ms/epoch - 12ms/step
Epoch 320/400
6/6 - 0s - loss: 0.0730 - accuracy: 0.6381 - val_loss: 0.0793 - val_accuracy: 0.6021 - 74ms/epoch - 12ms/step
Epoch 321/400
6/6 - 0s - loss: 0.0725 - accuracy: 0.6408 - val_loss: 0.0801 - val_accuracy: 0.6000 - 79ms/epoch - 13ms/step
Epoch 322/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6408 - val_loss: 0.0803 - val_accuracy: 0.5979 - 71ms/epoch - 12ms/step
Epoch 323/400
6/6 - 0s - loss: 0.0717 - accuracy: 0.6443 - val_loss: 0.0803 - val_accuracy: 0.5979 - 74ms/epoch - 12ms/step
Epoch 324/400
6/6 - 0s - loss: 0.0723 - accuracy: 0.6381 - val_loss: 0.0791 - val_accuracy: 0.6021 - 64ms/epoch - 11ms/step
Epoch 325/400
6/6 - 0s - loss: 0.0727 - accuracy: 0.6363 - val_loss: 0.0784 - val_accuracy: 0.6104 - 76ms/epoch - 13ms/step
Epoch 326/400
6/6 - 0s - loss: 0.0718 - accuracy: 0.6416 - val_loss: 0.0803 - val_accuracy: 0.5979 - 74ms/epoch - 12ms/step
Epoch 327/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6372 - val_loss: 0.0804 - val_accuracy: 0.5979 - 64ms/epoch - 11ms/step
Epoch 328/400
6/6 - 0s - loss: 0.0728 - accuracy: 0.6363 - val_loss: 0.0794 - val_accuracy: 0.6021 - 74ms/epoch - 12ms/step
Epoch 329/400
6/6 - 0s - loss: 0.0718 - accuracy: 0.6416 - val_loss: 0.0787 - val_accuracy: 0.6062 - 78ms/epoch - 13ms/step
Epoch 330/400
6/6 - 0s - loss: 0.0732 - accuracy: 0.6354 - val_loss: 0.0782 - val_accuracy: 0.6104 - 78ms/epoch - 13ms/step
Epoch 331/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6408 - val_loss: 0.0796 - val_accuracy: 0.6021 - 77ms/epoch - 13ms/step
Epoch 332/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6416 - val_loss: 0.0807 - val_accuracy: 0.5958 - 75ms/epoch - 12ms/step
Epoch 333/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6425 - val_loss: 0.0805 - val_accuracy: 0.5979 - 75ms/epoch - 13ms/step
Epoch 334/400
6/6 - 0s - loss: 0.0729 - accuracy: 0.6390 - val_loss: 0.0788 - val_accuracy: 0.6062 - 75ms/epoch - 12ms/step
Epoch 335/400
6/6 - 0s - loss: 0.0715 - accuracy: 0.6425 - val_loss: 0.0788 - val_accuracy: 0.6062 - 74ms/epoch - 12ms/step
Epoch 336/400
6/6 - 0s - loss: 0.0723 - accuracy: 0.6416 - val_loss: 0.0788 - val_accuracy: 0.6062 - 73ms/epoch - 12ms/step
Epoch 337/400
6/6 - 0s - loss: 0.0714 - accuracy: 0.6452 - val_loss: 0.0789 - val_accuracy: 0.6062 - 68ms/epoch - 11ms/step
Epoch 338/400
6/6 - 0s - loss: 0.0717 - accuracy: 0.6408 - val_loss: 0.0799 - val_accuracy: 0.6000 - 80ms/epoch - 13ms/step
Epoch 339/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6443 - val_loss: 0.0806 - val_accuracy: 0.5958 - 65ms/epoch - 11ms/step
Epoch 340/400
6/6 - 0s - loss: 0.0715 - accuracy: 0.6452 - val_loss: 0.0801 - val_accuracy: 0.6000 - 70ms/epoch - 12ms/step
Epoch 341/400
6/6 - 0s - loss: 0.0715 - accuracy: 0.6425 - val_loss: 0.0795 - val_accuracy: 0.6042 - 73ms/epoch - 12ms/step
Epoch 342/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6408 - val_loss: 0.0793 - val_accuracy: 0.6021 - 62ms/epoch - 10ms/step
Epoch 343/400
6/6 - 0s - loss: 0.0714 - accuracy: 0.6425 - val_loss: 0.0794 - val_accuracy: 0.6021 - 78ms/epoch - 13ms/step
Epoch 344/400
6/6 - 0s - loss: 0.0717 - accuracy: 0.6425 - val_loss: 0.0789 - val_accuracy: 0.6062 - 80ms/epoch - 13ms/step
Epoch 345/400
6/6 - 0s - loss: 0.0722 - accuracy: 0.6399 - val_loss: 0.0790 - val_accuracy: 0.6042 - 75ms/epoch - 13ms/step
Epoch 346/400
6/6 - 0s - loss: 0.0716 - accuracy: 0.6425 - val_loss: 0.0785 - val_accuracy: 0.6062 - 68ms/epoch - 11ms/step
Epoch 347/400
6/6 - 0s - loss: 0.0717 - accuracy: 0.6425 - val_loss: 0.0802 - val_accuracy: 0.5979 - 74ms/epoch - 12ms/step
Epoch 348/400
6/6 - 0s - loss: 0.0710 - accuracy: 0.6470 - val_loss: 0.0802 - val_accuracy: 0.6000 - 78ms/epoch - 13ms/step
Epoch 349/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6416 - val_loss: 0.0796 - val_accuracy: 0.6000 - 68ms/epoch - 11ms/step
Epoch 350/400
6/6 - 0s - loss: 0.0710 - accuracy: 0.6479 - val_loss: 0.0796 - val_accuracy: 0.6021 - 71ms/epoch - 12ms/step
Epoch 351/400
6/6 - 0s - loss: 0.0712 - accuracy: 0.6461 - val_loss: 0.0793 - val_accuracy: 0.6042 - 65ms/epoch - 11ms/step
Epoch 352/400
6/6 - 0s - loss: 0.0726 - accuracy: 0.6399 - val_loss: 0.0790 - val_accuracy: 0.6042 - 74ms/epoch - 12ms/step
Epoch 353/400
6/6 - 0s - loss: 0.0726 - accuracy: 0.6399 - val_loss: 0.0791 - val_accuracy: 0.6042 - 75ms/epoch - 12ms/step
Epoch 354/400
6/6 - 0s - loss: 0.0707 - accuracy: 0.6479 - val_loss: 0.0784 - val_accuracy: 0.6083 - 59ms/epoch - 10ms/step
Epoch 355/400
6/6 - 0s - loss: 0.0715 - accuracy: 0.6425 - val_loss: 0.0786 - val_accuracy: 0.6083 - 46ms/epoch - 8ms/step
Epoch 356/400
6/6 - 0s - loss: 0.0711 - accuracy: 0.6452 - val_loss: 0.0782 - val_accuracy: 0.6104 - 49ms/epoch - 8ms/step
Epoch 357/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6434 - val_loss: 0.0783 - val_accuracy: 0.6083 - 45ms/epoch - 7ms/step
Epoch 358/400
6/6 - 0s - loss: 0.0717 - accuracy: 0.6425 - val_loss: 0.0799 - val_accuracy: 0.6000 - 45ms/epoch - 7ms/step
Epoch 359/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6434 - val_loss: 0.0800 - val_accuracy: 0.6000 - 45ms/epoch - 8ms/step
Epoch 360/400
6/6 - 0s - loss: 0.0719 - accuracy: 0.6416 - val_loss: 0.0799 - val_accuracy: 0.6021 - 61ms/epoch - 10ms/step
Epoch 361/400
6/6 - 0s - loss: 0.0714 - accuracy: 0.6461 - val_loss: 0.0792 - val_accuracy: 0.6042 - 60ms/epoch - 10ms/step
Epoch 362/400
6/6 - 0s - loss: 0.0730 - accuracy: 0.6354 - val_loss: 0.0779 - val_accuracy: 0.6104 - 48ms/epoch - 8ms/step
Epoch 363/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6416 - val_loss: 0.0769 - val_accuracy: 0.6167 - 54ms/epoch - 9ms/step
Epoch 364/400
6/6 - 0s - loss: 0.0718 - accuracy: 0.6416 - val_loss: 0.0775 - val_accuracy: 0.6104 - 60ms/epoch - 10ms/step
Epoch 365/400
6/6 - 0s - loss: 0.0714 - accuracy: 0.6452 - val_loss: 0.0785 - val_accuracy: 0.6083 - 47ms/epoch - 8ms/step
Epoch 366/400
6/6 - 0s - loss: 0.0713 - accuracy: 0.6470 - val_loss: 0.0795 - val_accuracy: 0.6021 - 60ms/epoch - 10ms/step
Epoch 367/400
6/6 - 0s - loss: 0.0712 - accuracy: 0.6443 - val_loss: 0.0798 - val_accuracy: 0.6021 - 51ms/epoch - 9ms/step
Epoch 368/400
6/6 - 0s - loss: 0.0716 - accuracy: 0.6443 - val_loss: 0.0794 - val_accuracy: 0.6021 - 59ms/epoch - 10ms/step
Epoch 369/400
6/6 - 0s - loss: 0.0713 - accuracy: 0.6470 - val_loss: 0.0791 - val_accuracy: 0.6021 - 49ms/epoch - 8ms/step
Epoch 370/400
6/6 - 0s - loss: 0.0707 - accuracy: 0.6470 - val_loss: 0.0779 - val_accuracy: 0.6104 - 59ms/epoch - 10ms/step
Epoch 371/400
6/6 - 0s - loss: 0.0711 - accuracy: 0.6497 - val_loss: 0.0780 - val_accuracy: 0.6104 - 61ms/epoch - 10ms/step
Epoch 372/400
6/6 - 0s - loss: 0.0711 - accuracy: 0.6452 - val_loss: 0.0793 - val_accuracy: 0.6021 - 49ms/epoch - 8ms/step
Epoch 373/400
6/6 - 0s - loss: 0.0715 - accuracy: 0.6452 - val_loss: 0.0792 - val_accuracy: 0.6042 - 50ms/epoch - 8ms/step
Epoch 374/400
6/6 - 0s - loss: 0.0719 - accuracy: 0.6408 - val_loss: 0.0794 - val_accuracy: 0.6021 - 61ms/epoch - 10ms/step
Epoch 375/400
6/6 - 0s - loss: 0.0711 - accuracy: 0.6461 - val_loss: 0.0773 - val_accuracy: 0.6125 - 47ms/epoch - 8ms/step
Epoch 376/400
6/6 - 0s - loss: 0.0717 - accuracy: 0.6416 - val_loss: 0.0764 - val_accuracy: 0.6167 - 61ms/epoch - 10ms/step
Epoch 377/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6416 - val_loss: 0.0761 - val_accuracy: 0.6187 - 46ms/epoch - 8ms/step
Epoch 378/400
6/6 - 0s - loss: 0.0712 - accuracy: 0.6488 - val_loss: 0.0773 - val_accuracy: 0.6146 - 44ms/epoch - 7ms/step
Epoch 379/400
6/6 - 0s - loss: 0.0708 - accuracy: 0.6479 - val_loss: 0.0791 - val_accuracy: 0.6042 - 48ms/epoch - 8ms/step
Epoch 380/400
6/6 - 0s - loss: 0.0707 - accuracy: 0.6497 - val_loss: 0.0792 - val_accuracy: 0.6042 - 54ms/epoch - 9ms/step
Epoch 381/400
6/6 - 0s - loss: 0.0717 - accuracy: 0.6425 - val_loss: 0.0789 - val_accuracy: 0.6062 - 46ms/epoch - 8ms/step
Epoch 382/400
6/6 - 0s - loss: 0.0722 - accuracy: 0.6416 - val_loss: 0.0793 - val_accuracy: 0.6042 - 58ms/epoch - 10ms/step
Epoch 383/400
6/6 - 0s - loss: 0.0719 - accuracy: 0.6425 - val_loss: 0.0803 - val_accuracy: 0.5979 - 58ms/epoch - 10ms/step
Epoch 384/400
6/6 - 0s - loss: 0.0715 - accuracy: 0.6452 - val_loss: 0.0795 - val_accuracy: 0.6021 - 44ms/epoch - 7ms/step
Epoch 385/400
6/6 - 0s - loss: 0.0710 - accuracy: 0.6461 - val_loss: 0.0794 - val_accuracy: 0.6042 - 59ms/epoch - 10ms/step
Epoch 386/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6372 - val_loss: 0.0796 - val_accuracy: 0.6021 - 53ms/epoch - 9ms/step
Epoch 387/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6425 - val_loss: 0.0797 - val_accuracy: 0.6021 - 45ms/epoch - 7ms/step
Epoch 388/400
6/6 - 0s - loss: 0.0720 - accuracy: 0.6416 - val_loss: 0.0793 - val_accuracy: 0.6042 - 45ms/epoch - 7ms/step
Epoch 389/400
6/6 - 0s - loss: 0.0722 - accuracy: 0.6408 - val_loss: 0.0793 - val_accuracy: 0.6042 - 45ms/epoch - 8ms/step
Epoch 390/400
6/6 - 0s - loss: 0.0706 - accuracy: 0.6488 - val_loss: 0.0786 - val_accuracy: 0.6062 - 46ms/epoch - 8ms/step
Epoch 391/400
6/6 - 0s - loss: 0.0713 - accuracy: 0.6479 - val_loss: 0.0791 - val_accuracy: 0.6021 - 62ms/epoch - 10ms/step
Epoch 392/400
6/6 - 0s - loss: 0.0717 - accuracy: 0.6425 - val_loss: 0.0789 - val_accuracy: 0.6042 - 58ms/epoch - 10ms/step
Epoch 393/400
6/6 - 0s - loss: 0.0715 - accuracy: 0.6434 - val_loss: 0.0789 - val_accuracy: 0.6062 - 50ms/epoch - 8ms/step
Epoch 394/400
6/6 - 0s - loss: 0.0711 - accuracy: 0.6497 - val_loss: 0.0798 - val_accuracy: 0.6021 - 64ms/epoch - 11ms/step
Epoch 395/400
6/6 - 0s - loss: 0.0721 - accuracy: 0.6399 - val_loss: 0.0787 - val_accuracy: 0.6062 - 47ms/epoch - 8ms/step
Epoch 396/400
6/6 - 0s - loss: 0.0715 - accuracy: 0.6461 - val_loss: 0.0771 - val_accuracy: 0.6146 - 46ms/epoch - 8ms/step
Epoch 397/400
6/6 - 0s - loss: 0.0727 - accuracy: 0.6381 - val_loss: 0.0765 - val_accuracy: 0.6187 - 60ms/epoch - 10ms/step
Epoch 398/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6345 - val_loss: 0.0777 - val_accuracy: 0.6104 - 50ms/epoch - 8ms/step
Epoch 399/400
6/6 - 0s - loss: 0.0731 - accuracy: 0.6354 - val_loss: 0.0773 - val_accuracy: 0.6146 - 47ms/epoch - 8ms/step
Epoch 400/400
6/6 - 0s - loss: 0.0724 - accuracy: 0.6408 - val_loss: 0.0771 - val_accuracy: 0.6167 - 45ms/epoch - 8ms/step
In [45]:
# predicting the model on test data
y_pred=model.predict(x_test)
15/15 [==============================] - 0s 2ms/step
In [46]:
y_pred[0]
Out[46]:
array([0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00,
       0.0000000e+00, 1.7189235e-23, 1.0000000e+00, 0.0000000e+00,
       0.0000000e+00, 0.0000000e+00], dtype=float32)
In [47]:
# Since the outputs are in probabilities we try to get the label
y_pred_final=[]
for i in y_pred:
  y_pred_final.append(np.argmax(i))
In [48]:
from sklearn.metrics import classification_report
print(classification_report(y_test,y_pred_final))
              precision    recall  f1-score   support

           3       0.00      0.00      0.00         2
           4       0.00      0.00      0.00        21
           5       0.71      0.73      0.72       207
           6       0.55      0.74      0.63       195
           7       0.00      0.00      0.00        52
           8       0.00      0.00      0.00         3

    accuracy                           0.62       480
   macro avg       0.21      0.25      0.22       480
weighted avg       0.53      0.62      0.56       480

In [49]:
from sklearn.metrics import confusion_matrix             #Confusion matrix
import seaborn as sns

cm=confusion_matrix(y_test,y_pred_final)

plt.figure(figsize=(10,7))
sns.heatmap(cm,annot=True,fmt='d')
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
In [50]:
import matplotlib.pyplot as plt           #plotting the accuracy and losses after each iteration


def plot_graphs(history, string):
  plt.plot(history.history[string])
  plt.plot(history.history['val_'+string])
  plt.xlabel("Epochs")
  plt.ylabel(string)
  plt.legend([string, 'val_'+string])
  plt.show()

plot_graphs(history, 'accuracy')
plot_graphs(history, 'loss')

Insights on difference between the two models:

  1. On implementing dropout we can see that the validation accuracy has improved. Also the validation losses has reduced.
  2. On using dropout each neuron get trained properly and they perform well with the test data.

Part B¶

Loading the dataset¶

In [51]:
import h5py
In [52]:
h5f = h5py.File('/content/MyDrive/MyDrive/Dataset/Autonomous_Vehicles_SVHN_single_grey1.h5', 'r')    #reading the h5py file
In [53]:
x_train = h5f['X_train'][:]       #loading the train and test data
y_train = h5f['y_train'][:]
x_test = h5f['X_test'][:]
y_test = h5f['y_test'][:]
In [54]:
h5f.close()      #closing the file

Data Preprocessing and Visualizing¶

In [55]:
print('X Train set contains {} data'.format(x_train.shape))
print('X Test set contains {} data'.format(x_test.shape))          #shapeof the train and test data
print('Y Train set contains {} data'.format(y_train.shape))
print('Y Test set contains {} data'.format(y_test.shape))
X Train set contains (42000, 32, 32) data
X Test set contains (18000, 32, 32) data
Y Train set contains (42000,) data
Y Test set contains (18000,) data
In [56]:
plt.figure(figsize=(10, 1))    #first 10 images from the dataset
for i in range(10):
    plt.subplot(1, 10, i+1)
    plt.imshow(x_train[i], cmap="gray")
    plt.axis('off')
plt.show()
print('label for each of the above image: %s' % (y_train[0:10]))
label for each of the above image: [2 6 7 4 4 0 3 0 7 3]
In [57]:
image_size = 32*32
x_train = x_train.reshape(x_train.shape[0], image_size)    #Reshaping the images
x_test = x_test.reshape(x_test.shape[0], image_size)

# normalize inputs from 0-255 to 0-1
x_train = x_train / 255.0
x_test = x_test / 255.0

print('Training set', x_train.shape, y_train.shape)
print('Test set', x_test.shape, y_test.shape)
Training set (42000, 1024) (42000,)
Test set (18000, 1024) (18000,)
In [58]:
np.unique(y_train)  #target variable labels
Out[58]:
array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], dtype=uint8)
In [59]:
num_classes = 10       #convert the target variable to one hot vectors
y_train_cat = to_categorical(y_train, num_classes)
y_test_cat=to_categorical(y_test,num_classes)
In [60]:
print("First 5 training lables as one-hot encoded vectors:\n", y_train_cat[:5])
First 5 training lables as one-hot encoded vectors:
 [[0. 0. 1. 0. 0. 0. 0. 0. 0. 0.]
 [0. 0. 0. 0. 0. 0. 1. 0. 0. 0.]
 [0. 0. 0. 0. 0. 0. 0. 1. 0. 0.]
 [0. 0. 0. 0. 1. 0. 0. 0. 0. 0.]
 [0. 0. 0. 0. 1. 0. 0. 0. 0. 0.]]
In [61]:
print("Label: ", y_train[1])                             #example
print("label:", y_train_cat[1])
plt.imshow(x_train[1].reshape(32,32), cmap='gray')
Label:  6
label: [0. 0. 0. 0. 0. 0. 1. 0. 0. 0.]
Out[61]:
<matplotlib.image.AxesImage at 0x7fc90254e140>

Model training and Evaluation¶

In [62]:
model = Sequential()

model.add(Dense(512, activation='relu',kernel_initializer='he_uniform',input_shape=(image_size,))) ###Multiple Dense layers with Relu activation
model.add(Dense(128, activation='relu',kernel_initializer='he_uniform'))

#model.add(Dense(64, activation='relu',kernel_initializer='he_uniform'))
#model.add(Dense(32, activation='relu',kernel_initializer='he_uniform'))

model.add(Dense(num_classes, activation='softmax')) ### For multiclass classification Softmax is used
In [63]:
# Compile model
adam = optimizers.Adam(lr=1e-3)
model.compile(loss=losses.categorical_crossentropy, optimizer=adam, metrics=['accuracy']) ### Loss function = Categorical cross entropy
In [64]:
model.summary() #Summary of neural network model
Model: "sequential_2"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense_10 (Dense)            (None, 512)               524800    
                                                                 
 dense_11 (Dense)            (None, 128)               65664     
                                                                 
 dense_12 (Dense)            (None, 10)                1290      
                                                                 
=================================================================
Total params: 591,754
Trainable params: 591,754
Non-trainable params: 0
_________________________________________________________________
In [65]:
from tensorflow.keras.callbacks import ModelCheckpoint, ReduceLROnPlateau            #callback
checkpoint = ModelCheckpoint("model_weights.h5",monitor='val_accuracy',
                            save_weights_only=True, mode='max',verbose=1)
reduce_lr = ReduceLROnPlateau(monitor='val_loss',factor=0.1,patience=2,min_lr=0.00001,model='auto')

callbacks = [checkpoint,reduce_lr]
In [66]:
# Fit the model
history=model.fit(x_train, y_train_cat, validation_split=0.2, epochs=30, batch_size=128, verbose=2,callbacks=callbacks)
Epoch 1/30

Epoch 1: saving model to model_weights.h5
263/263 - 1s - loss: 2.3056 - accuracy: 0.1285 - val_loss: 2.2171 - val_accuracy: 0.1961 - lr: 0.0010 - 1s/epoch - 5ms/step
Epoch 2/30

Epoch 2: saving model to model_weights.h5
263/263 - 1s - loss: 1.8350 - accuracy: 0.3657 - val_loss: 1.5143 - val_accuracy: 0.5121 - lr: 0.0010 - 799ms/epoch - 3ms/step
Epoch 3/30

Epoch 3: saving model to model_weights.h5
263/263 - 1s - loss: 1.3865 - accuracy: 0.5565 - val_loss: 1.3361 - val_accuracy: 0.5874 - lr: 0.0010 - 800ms/epoch - 3ms/step
Epoch 4/30

Epoch 4: saving model to model_weights.h5
263/263 - 1s - loss: 1.2226 - accuracy: 0.6194 - val_loss: 1.1900 - val_accuracy: 0.6194 - lr: 0.0010 - 798ms/epoch - 3ms/step
Epoch 5/30

Epoch 5: saving model to model_weights.h5
263/263 - 1s - loss: 1.1209 - accuracy: 0.6510 - val_loss: 1.0789 - val_accuracy: 0.6651 - lr: 0.0010 - 792ms/epoch - 3ms/step
Epoch 6/30

Epoch 6: saving model to model_weights.h5
263/263 - 1s - loss: 1.0561 - accuracy: 0.6720 - val_loss: 1.0878 - val_accuracy: 0.6669 - lr: 0.0010 - 793ms/epoch - 3ms/step
Epoch 7/30

Epoch 7: saving model to model_weights.h5
263/263 - 1s - loss: 0.9959 - accuracy: 0.6934 - val_loss: 0.9875 - val_accuracy: 0.7024 - lr: 0.0010 - 938ms/epoch - 4ms/step
Epoch 8/30

Epoch 8: saving model to model_weights.h5
263/263 - 1s - loss: 0.9571 - accuracy: 0.7070 - val_loss: 1.0552 - val_accuracy: 0.6844 - lr: 0.0010 - 830ms/epoch - 3ms/step
Epoch 9/30

Epoch 9: saving model to model_weights.h5
263/263 - 1s - loss: 0.9009 - accuracy: 0.7251 - val_loss: 0.9020 - val_accuracy: 0.7275 - lr: 0.0010 - 948ms/epoch - 4ms/step
Epoch 10/30

Epoch 10: saving model to model_weights.h5
263/263 - 1s - loss: 0.8773 - accuracy: 0.7325 - val_loss: 0.8980 - val_accuracy: 0.7267 - lr: 0.0010 - 1s/epoch - 4ms/step
Epoch 11/30

Epoch 11: saving model to model_weights.h5
263/263 - 1s - loss: 0.8417 - accuracy: 0.7437 - val_loss: 0.8797 - val_accuracy: 0.7369 - lr: 0.0010 - 1s/epoch - 4ms/step
Epoch 12/30

Epoch 12: saving model to model_weights.h5
263/263 - 1s - loss: 0.8199 - accuracy: 0.7510 - val_loss: 0.8385 - val_accuracy: 0.7429 - lr: 0.0010 - 1s/epoch - 5ms/step
Epoch 13/30

Epoch 13: saving model to model_weights.h5
263/263 - 1s - loss: 0.7911 - accuracy: 0.7588 - val_loss: 0.8472 - val_accuracy: 0.7410 - lr: 0.0010 - 812ms/epoch - 3ms/step
Epoch 14/30

Epoch 14: saving model to model_weights.h5
263/263 - 1s - loss: 0.7868 - accuracy: 0.7593 - val_loss: 0.8698 - val_accuracy: 0.7337 - lr: 0.0010 - 804ms/epoch - 3ms/step
Epoch 15/30

Epoch 15: saving model to model_weights.h5
263/263 - 1s - loss: 0.6863 - accuracy: 0.7952 - val_loss: 0.7484 - val_accuracy: 0.7801 - lr: 1.0000e-04 - 792ms/epoch - 3ms/step
Epoch 16/30

Epoch 16: saving model to model_weights.h5
263/263 - 1s - loss: 0.6774 - accuracy: 0.7970 - val_loss: 0.7489 - val_accuracy: 0.7810 - lr: 1.0000e-04 - 793ms/epoch - 3ms/step
Epoch 17/30

Epoch 17: saving model to model_weights.h5
263/263 - 1s - loss: 0.6734 - accuracy: 0.7980 - val_loss: 0.7439 - val_accuracy: 0.7787 - lr: 1.0000e-04 - 802ms/epoch - 3ms/step
Epoch 18/30

Epoch 18: saving model to model_weights.h5
263/263 - 1s - loss: 0.6710 - accuracy: 0.7984 - val_loss: 0.7395 - val_accuracy: 0.7852 - lr: 1.0000e-04 - 799ms/epoch - 3ms/step
Epoch 19/30

Epoch 19: saving model to model_weights.h5
263/263 - 1s - loss: 0.6672 - accuracy: 0.8007 - val_loss: 0.7395 - val_accuracy: 0.7830 - lr: 1.0000e-04 - 801ms/epoch - 3ms/step
Epoch 20/30

Epoch 20: saving model to model_weights.h5
263/263 - 1s - loss: 0.6638 - accuracy: 0.8016 - val_loss: 0.7356 - val_accuracy: 0.7854 - lr: 1.0000e-04 - 838ms/epoch - 3ms/step
Epoch 21/30

Epoch 21: saving model to model_weights.h5
263/263 - 1s - loss: 0.6613 - accuracy: 0.8040 - val_loss: 0.7372 - val_accuracy: 0.7842 - lr: 1.0000e-04 - 787ms/epoch - 3ms/step
Epoch 22/30

Epoch 22: saving model to model_weights.h5
263/263 - 1s - loss: 0.6587 - accuracy: 0.8038 - val_loss: 0.7350 - val_accuracy: 0.7854 - lr: 1.0000e-04 - 800ms/epoch - 3ms/step
Epoch 23/30

Epoch 23: saving model to model_weights.h5
263/263 - 1s - loss: 0.6549 - accuracy: 0.8057 - val_loss: 0.7352 - val_accuracy: 0.7845 - lr: 1.0000e-04 - 786ms/epoch - 3ms/step
Epoch 24/30

Epoch 24: saving model to model_weights.h5
263/263 - 1s - loss: 0.6530 - accuracy: 0.8059 - val_loss: 0.7328 - val_accuracy: 0.7832 - lr: 1.0000e-04 - 798ms/epoch - 3ms/step
Epoch 25/30

Epoch 25: saving model to model_weights.h5
263/263 - 1s - loss: 0.6508 - accuracy: 0.8060 - val_loss: 0.7287 - val_accuracy: 0.7863 - lr: 1.0000e-04 - 1s/epoch - 4ms/step
Epoch 26/30

Epoch 26: saving model to model_weights.h5
263/263 - 1s - loss: 0.6465 - accuracy: 0.8077 - val_loss: 0.7296 - val_accuracy: 0.7858 - lr: 1.0000e-04 - 1s/epoch - 4ms/step
Epoch 27/30

Epoch 27: saving model to model_weights.h5
263/263 - 1s - loss: 0.6445 - accuracy: 0.8083 - val_loss: 0.7257 - val_accuracy: 0.7857 - lr: 1.0000e-04 - 1s/epoch - 5ms/step
Epoch 28/30

Epoch 28: saving model to model_weights.h5
263/263 - 1s - loss: 0.6427 - accuracy: 0.8084 - val_loss: 0.7288 - val_accuracy: 0.7840 - lr: 1.0000e-04 - 828ms/epoch - 3ms/step
Epoch 29/30

Epoch 29: saving model to model_weights.h5
263/263 - 1s - loss: 0.6393 - accuracy: 0.8106 - val_loss: 0.7249 - val_accuracy: 0.7885 - lr: 1.0000e-04 - 784ms/epoch - 3ms/step
Epoch 30/30

Epoch 30: saving model to model_weights.h5
263/263 - 1s - loss: 0.6385 - accuracy: 0.8096 - val_loss: 0.7202 - val_accuracy: 0.7907 - lr: 1.0000e-04 - 782ms/epoch - 3ms/step
In [67]:
# predicting the model on test data
y_pred=model.predict(x_test)
563/563 [==============================] - 1s 1ms/step
In [68]:
# Since the outputs are probability values we try to get the labels
y_pred_final=[]
for i in y_pred:
  y_pred_final.append(np.argmax(i))
In [69]:
from sklearn.metrics import classification_report
print(classification_report(y_test,y_pred_final))
              precision    recall  f1-score   support

           0       0.84      0.81      0.82      1814
           1       0.79      0.82      0.81      1828
           2       0.83      0.79      0.81      1803
           3       0.74      0.73      0.73      1719
           4       0.84      0.82      0.83      1812
           5       0.69      0.79      0.74      1768
           6       0.79      0.77      0.78      1832
           7       0.81      0.83      0.82      1808
           8       0.78      0.71      0.74      1812
           9       0.75      0.77      0.76      1804

    accuracy                           0.78     18000
   macro avg       0.78      0.78      0.78     18000
weighted avg       0.79      0.78      0.78     18000

In [70]:
from sklearn.metrics import confusion_matrix            #confusion matrix
import seaborn as sns

cm=confusion_matrix(y_test,y_pred_final)

plt.figure(figsize=(10,7))
sns.heatmap(cm,annot=True,fmt='d')
plt.xlabel('Predicted')
plt.ylabel('Truth')
plt.show()
In [71]:
import matplotlib.pyplot as plt


def plot_graphs(history, string):
  plt.plot(history.history[string])
  plt.plot(history.history['val_'+string])          #plotting the accuracy and losses after each iteration
  plt.xlabel("Epochs")
  plt.ylabel(string)
  plt.legend([string, 'val_'+string])
  plt.show()

plot_graphs(history, 'accuracy')
plot_graphs(history, 'loss')

We can see that the model is not overfit. The training and validation accuracy and losses are close to each other.

In [71]: